Image resource refactor

This commit pulls most of the image-related logic into its own package, making it easier to reason about and extend. It also rewrites the transformation logic used in Hugo Pipes, mostly to allow constructs like the one below:

    {{ ($myimg | fingerprint).Width }}

Fixes #5903
Fixes #6234
Fixes #6266
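For illustration, a minimal template sketch of the chaining this enables, adapted from the new TestResourceChainBasic test below (the asset path and sizes are just examples):

    {{ $img := resources.Get "images/sunset.jpg" }}
    {{ $fit := $img.Fit "200x200" }}
    {{ $fit2 := $fit.Fit "100x200" }}
    {{/* A fingerprinted image still exposes image metadata such as Width. */}}
    {{ $img = $img | fingerprint }}
    Width after fingerprint: {{ $img.Width }}
    Nested fit: {{ $fit2.RelPermalink }}|{{ $fit2.Width }}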
Parent commit: 58d4c0a8be
This commit:   f9978ed164
34 changed files with 2674 additions and 1556 deletions
@@ -52,6 +52,7 @@ func FprintStackTrace(w io.Writer, err error) {
 // defer herrors.Recover()
 func Recover(args ...interface{}) {
 	if r := recover(); r != nil {
+		fmt.Println("ERR:", r)
 		args = append(args, "stacktrace from panic: \n"+string(debug.Stack()), "\n")
 		fmt.Println(args...)
 	}
@@ -14,8 +14,10 @@
 package htesting

 import (
+	"math/rand"
 	"runtime"
 	"strings"
+	"time"

 	"github.com/spf13/afero"
 )
@@ -37,3 +39,20 @@ func CreateTempDir(fs afero.Fs, prefix string) (string, func(), error) {
 	}
 	return tempDir, func() { fs.RemoveAll(tempDir) }, nil
 }
+
+// BailOut panics with a stack trace after the given duration. Useful for
+// hanging tests.
+func BailOut(after time.Duration) {
+	time.AfterFunc(after, func() {
+		buf := make([]byte, 1<<16)
+		runtime.Stack(buf, true)
+		panic(string(buf))
+	})
+
+}
+
+var rnd = rand.New(rand.NewSource(time.Now().UnixNano()))
+
+func RandIntn(n int) int {
+	return rnd.Intn(n)
+}
BIN  hugolib/assets/images/sunset.jpg  (new binary file, 88 KiB; not shown)
@@ -42,8 +42,7 @@ import (
 )

 func TestPageBundlerSiteRegular(t *testing.T) {
-	t.Parallel()
-
+	c := qt.New(t)
 	baseBaseURL := "https://example.com"

 	for _, baseURLPath := range []string{"", "/hugo"} {
@@ -55,15 +54,14 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 			}
 			ugly := ugly
 			canonify := canonify
-			t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId),
-				func(t *testing.T) {
-					t.Parallel()
+			c.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId),
+				func(c *qt.C) {
+					c.Parallel()
 					baseURL := baseBaseURL + baseURLPath
 					relURLBase := baseURLPath
 					if canonify {
 						relURLBase = ""
 					}
-					c := qt.New(t)
 					fs, cfg := newTestBundleSources(t)
 					cfg.Set("baseURL", baseURL)
 					cfg.Set("canonifyURLs", canonify)
@@ -14,6 +14,7 @@
 package hugolib

 import (
+	"io"
 	"os"
 	"path/filepath"
 	"testing"
@@ -167,6 +168,64 @@ T1: {{ $r.Content }}

 }

+func TestResourceChainBasic(t *testing.T) {
+	t.Parallel()
+
+	b := newTestSitesBuilder(t)
+	b.WithTemplatesAdded("index.html", `
+{{ $hello := "<h1> Hello World! </h1>" | resources.FromString "hello.html" | fingerprint "sha512" | minify | fingerprint }}
+
+HELLO: {{ $hello.Name }}|{{ $hello.RelPermalink }}|{{ $hello.Content | safeHTML }}
+
+{{ $img := resources.Get "images/sunset.jpg" }}
+{{ $fit := $img.Fit "200x200" }}
+{{ $fit2 := $fit.Fit "100x200" }}
+{{ $img = $img | fingerprint }}
+SUNSET: {{ $img.Name }}|{{ $img.RelPermalink }}|{{ $img.Width }}|{{ len $img.Content }}
+FIT: {{ $fit.Name }}|{{ $fit.RelPermalink }}|{{ $fit.Width }}
+`)
+
+	fs := b.Fs.Source
+
+	imageDir := filepath.Join("assets", "images")
+	b.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil)
+	src, err := os.Open("testdata/sunset.jpg")
+	b.Assert(err, qt.IsNil)
+	out, err := fs.Create(filepath.Join(imageDir, "sunset.jpg"))
+	b.Assert(err, qt.IsNil)
+	_, err = io.Copy(out, src)
+	b.Assert(err, qt.IsNil)
+	out.Close()
+
+	b.Running()
+
+	for i := 0; i < 2; i++ {
+
+		b.Build(BuildCfg{})
+
+		b.AssertFileContent("public/index.html",
+			`
+SUNSET: images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587
+FIT: images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200
+
+`)
+
+		b.EditFiles("page1.md", `
+---
+title: "Page 1 edit"
+summary: "Edited summary"
+---
+
+Edited content.
+
+`)
+
+		b.Assert(b.Fs.Destination.Remove("public"), qt.IsNil)
+		b.H.ResourceSpec.ClearCaches()
+
+	}
+}
+
 func TestResourceChain(t *testing.T) {
 	t.Parallel()
@@ -353,9 +412,11 @@ Publish 2: {{ $cssPublish2.Permalink }}
 			"Publish 1: body{color:blue} /external1.min.css",
 			"Publish 2: http://example.com/external2.min.css",
 		)
-		c.Assert(b.CheckExists("public/external2.min.css"), qt.Equals, true)
-		c.Assert(b.CheckExists("public/external1.min.css"), qt.Equals, true)
-		c.Assert(b.CheckExists("public/inline.min.css"), qt.Equals, false)
+		b.Assert(b.CheckExists("public/external2.css"), qt.Equals, false)
+		b.Assert(b.CheckExists("public/external1.css"), qt.Equals, false)
+		b.Assert(b.CheckExists("public/external2.min.css"), qt.Equals, true)
+		b.Assert(b.CheckExists("public/external1.min.css"), qt.Equals, true)
+		b.Assert(b.CheckExists("public/inline.min.css"), qt.Equals, false)
 	}},

 	{"unmarshal", func() bool { return true }, func(b *sitesBuilder) {
@@ -536,6 +536,7 @@ func (s *sitesBuilder) changeEvents() []fsnotify.Event {
 }

 func (s *sitesBuilder) build(cfg BuildCfg, shouldFail bool) *sitesBuilder {
+	s.Helper()
 	defer func() {
 		s.changedFiles = nil
 	}()
@ -14,198 +14,98 @@
|
||||||
package resources
|
package resources
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"image"
|
"image"
|
||||||
"image/color"
|
"image/color"
|
||||||
"image/draw"
|
"image/draw"
|
||||||
"image/jpeg"
|
_ "image/gif"
|
||||||
"io"
|
_ "image/png"
|
||||||
"os"
|
"os"
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/resources/resource"
|
"github.com/gohugoio/hugo/resources/resource"
|
||||||
|
|
||||||
_errors "github.com/pkg/errors"
|
_errors "github.com/pkg/errors"
|
||||||
|
|
||||||
"github.com/disintegration/imaging"
|
"github.com/disintegration/imaging"
|
||||||
"github.com/gohugoio/hugo/common/hugio"
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
"github.com/mitchellh/mapstructure"
|
"github.com/gohugoio/hugo/resources/images"
|
||||||
|
|
||||||
// Blind import for image.Decode
|
// Blind import for image.Decode
|
||||||
_ "image/gif"
|
|
||||||
_ "image/png"
|
|
||||||
|
|
||||||
// Blind import for image.Decode
|
// Blind import for image.Decode
|
||||||
_ "golang.org/x/image/webp"
|
_ "golang.org/x/image/webp"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
_ resource.Resource = (*Image)(nil)
|
_ resource.Image = (*imageResource)(nil)
|
||||||
_ resource.Source = (*Image)(nil)
|
_ resource.Source = (*imageResource)(nil)
|
||||||
_ resource.Cloner = (*Image)(nil)
|
_ resource.Cloner = (*imageResource)(nil)
|
||||||
)
|
)
|
||||||
|
|
||||||
// Imaging contains default image processing configuration. This will be fetched
|
// ImageResource represents an image resource.
|
||||||
// from site (or language) config.
|
type imageResource struct {
|
||||||
type Imaging struct {
|
*images.Image
|
||||||
// Default image quality setting (1-100). Only used for JPEG images.
|
|
||||||
Quality int
|
|
||||||
|
|
||||||
// Resample filter used. See https://github.com/disintegration/imaging
|
baseResource
|
||||||
ResampleFilter string
|
|
||||||
|
|
||||||
// The anchor used in Fill. Default is "smart", i.e. Smart Crop.
|
|
||||||
Anchor string
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
func (i *imageResource) Clone() resource.Resource {
|
||||||
defaultJPEGQuality = 75
|
gr := i.baseResource.Clone().(baseResource)
|
||||||
defaultResampleFilter = "box"
|
return &imageResource{
|
||||||
)
|
Image: i.WithSpec(gr),
|
||||||
|
baseResource: gr,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
var (
|
func (i *imageResource) cloneWithUpdates(u *transformationUpdate) (baseResource, error) {
|
||||||
imageFormats = map[string]imaging.Format{
|
base, err := i.baseResource.cloneWithUpdates(u)
|
||||||
".jpg": imaging.JPEG,
|
if err != nil {
|
||||||
".jpeg": imaging.JPEG,
|
return nil, err
|
||||||
".png": imaging.PNG,
|
|
||||||
".tif": imaging.TIFF,
|
|
||||||
".tiff": imaging.TIFF,
|
|
||||||
".bmp": imaging.BMP,
|
|
||||||
".gif": imaging.GIF,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add or increment if changes to an image format's processing requires
|
var img *images.Image
|
||||||
// re-generation.
|
|
||||||
imageFormatsVersions = map[imaging.Format]int{
|
if u.isContenChanged() {
|
||||||
imaging.PNG: 2, // Floyd Steinberg dithering
|
img = i.WithSpec(base)
|
||||||
|
} else {
|
||||||
|
img = i.Image
|
||||||
}
|
}
|
||||||
|
|
||||||
// Increment to mark all processed images as stale. Only use when absolutely needed.
|
return &imageResource{
|
||||||
// See the finer grained smartCropVersionNumber and imageFormatsVersions.
|
Image: img,
|
||||||
mainImageVersionNumber = 0
|
baseResource: base,
|
||||||
)
|
}, nil
|
||||||
|
|
||||||
var anchorPositions = map[string]imaging.Anchor{
|
|
||||||
strings.ToLower("Center"): imaging.Center,
|
|
||||||
strings.ToLower("TopLeft"): imaging.TopLeft,
|
|
||||||
strings.ToLower("Top"): imaging.Top,
|
|
||||||
strings.ToLower("TopRight"): imaging.TopRight,
|
|
||||||
strings.ToLower("Left"): imaging.Left,
|
|
||||||
strings.ToLower("Right"): imaging.Right,
|
|
||||||
strings.ToLower("BottomLeft"): imaging.BottomLeft,
|
|
||||||
strings.ToLower("Bottom"): imaging.Bottom,
|
|
||||||
strings.ToLower("BottomRight"): imaging.BottomRight,
|
|
||||||
}
|
|
||||||
|
|
||||||
var imageFilters = map[string]imaging.ResampleFilter{
|
|
||||||
strings.ToLower("NearestNeighbor"): imaging.NearestNeighbor,
|
|
||||||
strings.ToLower("Box"): imaging.Box,
|
|
||||||
strings.ToLower("Linear"): imaging.Linear,
|
|
||||||
strings.ToLower("Hermite"): imaging.Hermite,
|
|
||||||
strings.ToLower("MitchellNetravali"): imaging.MitchellNetravali,
|
|
||||||
strings.ToLower("CatmullRom"): imaging.CatmullRom,
|
|
||||||
strings.ToLower("BSpline"): imaging.BSpline,
|
|
||||||
strings.ToLower("Gaussian"): imaging.Gaussian,
|
|
||||||
strings.ToLower("Lanczos"): imaging.Lanczos,
|
|
||||||
strings.ToLower("Hann"): imaging.Hann,
|
|
||||||
strings.ToLower("Hamming"): imaging.Hamming,
|
|
||||||
strings.ToLower("Blackman"): imaging.Blackman,
|
|
||||||
strings.ToLower("Bartlett"): imaging.Bartlett,
|
|
||||||
strings.ToLower("Welch"): imaging.Welch,
|
|
||||||
strings.ToLower("Cosine"): imaging.Cosine,
|
|
||||||
}
|
|
||||||
|
|
||||||
// Image represents an image resource.
|
|
||||||
type Image struct {
|
|
||||||
config image.Config
|
|
||||||
configInit sync.Once
|
|
||||||
configLoaded bool
|
|
||||||
|
|
||||||
imaging *Imaging
|
|
||||||
|
|
||||||
format imaging.Format
|
|
||||||
|
|
||||||
*genericResource
|
|
||||||
}
|
|
||||||
|
|
||||||
// Width returns i's width.
|
|
||||||
func (i *Image) Width() int {
|
|
||||||
i.initConfig()
|
|
||||||
return i.config.Width
|
|
||||||
}
|
|
||||||
|
|
||||||
// Height returns i's height.
|
|
||||||
func (i *Image) Height() int {
|
|
||||||
i.initConfig()
|
|
||||||
return i.config.Height
|
|
||||||
}
|
|
||||||
|
|
||||||
// WithNewBase implements the Cloner interface.
|
|
||||||
func (i *Image) WithNewBase(base string) resource.Resource {
|
|
||||||
return &Image{
|
|
||||||
imaging: i.imaging,
|
|
||||||
format: i.format,
|
|
||||||
genericResource: i.genericResource.WithNewBase(base).(*genericResource)}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resize resizes the image to the specified width and height using the specified resampling
|
// Resize resizes the image to the specified width and height using the specified resampling
|
||||||
// filter and returns the transformed image. If one of width or height is 0, the image aspect
|
// filter and returns the transformed image. If one of width or height is 0, the image aspect
|
||||||
// ratio is preserved.
|
// ratio is preserved.
|
||||||
func (i *Image) Resize(spec string) (*Image, error) {
|
func (i *imageResource) Resize(spec string) (resource.Image, error) {
|
||||||
return i.doWithImageConfig("resize", spec, func(src image.Image, conf imageConfig) (image.Image, error) {
|
return i.doWithImageConfig("resize", spec, func(src image.Image, conf images.ImageConfig) (image.Image, error) {
|
||||||
return imaging.Resize(src, conf.Width, conf.Height, conf.Filter), nil
|
return i.Proc.Resize(src, conf)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fit scales down the image using the specified resample filter to fit the specified
|
// Fit scales down the image using the specified resample filter to fit the specified
|
||||||
// maximum width and height.
|
// maximum width and height.
|
||||||
func (i *Image) Fit(spec string) (*Image, error) {
|
func (i *imageResource) Fit(spec string) (resource.Image, error) {
|
||||||
return i.doWithImageConfig("fit", spec, func(src image.Image, conf imageConfig) (image.Image, error) {
|
return i.doWithImageConfig("fit", spec, func(src image.Image, conf images.ImageConfig) (image.Image, error) {
|
||||||
return imaging.Fit(src, conf.Width, conf.Height, conf.Filter), nil
|
return i.Proc.Fit(src, conf)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fill scales the image to the smallest possible size that will cover the specified dimensions,
|
// Fill scales the image to the smallest possible size that will cover the specified dimensions,
|
||||||
// crops the resized image to the specified dimensions using the given anchor point.
|
// crops the resized image to the specified dimensions using the given anchor point.
|
||||||
// Space delimited config: 200x300 TopLeft
|
// Space delimited config: 200x300 TopLeft
|
||||||
func (i *Image) Fill(spec string) (*Image, error) {
|
func (i *imageResource) Fill(spec string) (resource.Image, error) {
|
||||||
return i.doWithImageConfig("fill", spec, func(src image.Image, conf imageConfig) (image.Image, error) {
|
return i.doWithImageConfig("fill", spec, func(src image.Image, conf images.ImageConfig) (image.Image, error) {
|
||||||
if conf.AnchorStr == smartCropIdentifier {
|
return i.Proc.Fill(src, conf)
|
||||||
return smartCrop(src, conf.Width, conf.Height, conf.Anchor, conf.Filter)
|
|
||||||
}
|
|
||||||
return imaging.Fill(src, conf.Width, conf.Height, conf.Anchor, conf.Filter), nil
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Holds configuration to create a new image from an existing one, resize etc.
|
func (i *imageResource) isJPEG() bool {
|
||||||
type imageConfig struct {
|
name := strings.ToLower(i.getResourcePaths().relTargetDirFile.file)
|
||||||
Action string
|
|
||||||
|
|
||||||
// Quality ranges from 1 to 100 inclusive, higher is better.
|
|
||||||
// This is only relevant for JPEG images.
|
|
||||||
// Default is 75.
|
|
||||||
Quality int
|
|
||||||
|
|
||||||
// Rotate rotates an image by the given angle counter-clockwise.
|
|
||||||
// The rotation will be performed first.
|
|
||||||
Rotate int
|
|
||||||
|
|
||||||
Width int
|
|
||||||
Height int
|
|
||||||
|
|
||||||
Filter imaging.ResampleFilter
|
|
||||||
FilterStr string
|
|
||||||
|
|
||||||
Anchor imaging.Anchor
|
|
||||||
AnchorStr string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *Image) isJPEG() bool {
|
|
||||||
name := strings.ToLower(i.relTargetDirFile.file)
|
|
||||||
return strings.HasSuffix(name, ".jpg") || strings.HasSuffix(name, ".jpeg")
|
return strings.HasSuffix(name, ".jpg") || strings.HasSuffix(name, ".jpeg")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -218,42 +118,20 @@ const imageProcWorkers = 1

 var imageProcSem = make(chan bool, imageProcWorkers)

-func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, conf imageConfig) (image.Image, error)) (*Image, error) {
-	conf, err := parseImageConfig(spec)
+func (i *imageResource) doWithImageConfig(action, spec string, f func(src image.Image, conf images.ImageConfig) (image.Image, error)) (resource.Image, error) {
+	conf, err := i.decodeImageConfig(action, spec)
 	if err != nil {
 		return nil, err
 	}
-	conf.Action = action
-
-	if conf.Quality <= 0 && i.isJPEG() {
-		// We need a quality setting for all JPEGs
-		conf.Quality = i.imaging.Quality
-	}
-
-	if conf.FilterStr == "" {
-		conf.FilterStr = i.imaging.ResampleFilter
-		conf.Filter = imageFilters[conf.FilterStr]
-	}
-
-	if conf.AnchorStr == "" {
-		conf.AnchorStr = i.imaging.Anchor
-		if !strings.EqualFold(conf.AnchorStr, smartCropIdentifier) {
-			conf.Anchor = anchorPositions[conf.AnchorStr]
-		}
-	}
-
-	return i.spec.imageCache.getOrCreate(i, conf, func() (*Image, image.Image, error) {
+
+	return i.getSpec().imageCache.getOrCreate(i, conf, func() (*imageResource, image.Image, error) {
 		imageProcSem <- true
 		defer func() {
 			<-imageProcSem
 		}()

-		ci := i.clone()
-
 		errOp := action
-		errPath := i.sourceFilename
-
-		ci.setBasePath(conf)
+		errPath := i.getSourceFilename()

 		src, err := i.decodeSource()
 		if err != nil {
@@ -267,10 +145,10 @@ func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, c

 		converted, err := f(src, conf)
 		if err != nil {
-			return ci, nil, &os.PathError{Op: errOp, Path: errPath, Err: err}
+			return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err}
 		}

-		if i.format == imaging.PNG {
+		if i.Format == imaging.PNG {
 			// Apply the colour palette from the source
 			if paletted, ok := src.(*image.Paletted); ok {
 				tmp := image.NewPaletted(converted.Bounds(), paletted.Palette)
@ -279,177 +157,30 @@ func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, c
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
b := converted.Bounds()
|
ci := i.clone(converted)
|
||||||
ci.config = image.Config{Width: b.Max.X, Height: b.Max.Y}
|
ci.setBasePath(conf)
|
||||||
ci.configLoaded = true
|
|
||||||
|
|
||||||
return ci, converted, nil
|
return ci, converted, nil
|
||||||
})
|
})
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i imageConfig) key(format imaging.Format) string {
|
func (i *imageResource) decodeImageConfig(action, spec string) (images.ImageConfig, error) {
|
||||||
k := strconv.Itoa(i.Width) + "x" + strconv.Itoa(i.Height)
|
conf, err := images.DecodeImageConfig(action, spec, i.Proc.Cfg)
|
||||||
if i.Action != "" {
|
if err != nil {
|
||||||
k += "_" + i.Action
|
return conf, err
|
||||||
}
|
|
||||||
if i.Quality > 0 {
|
|
||||||
k += "_q" + strconv.Itoa(i.Quality)
|
|
||||||
}
|
|
||||||
if i.Rotate != 0 {
|
|
||||||
k += "_r" + strconv.Itoa(i.Rotate)
|
|
||||||
}
|
|
||||||
anchor := i.AnchorStr
|
|
||||||
if anchor == smartCropIdentifier {
|
|
||||||
anchor = anchor + strconv.Itoa(smartCropVersionNumber)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
k += "_" + i.FilterStr
|
iconf := i.Proc.Cfg
|
||||||
|
|
||||||
if strings.EqualFold(i.Action, "fill") {
|
if conf.Quality <= 0 && i.isJPEG() {
|
||||||
k += "_" + anchor
|
// We need a quality setting for all JPEGs
|
||||||
|
conf.Quality = iconf.Quality
|
||||||
}
|
}
|
||||||
|
|
||||||
if v, ok := imageFormatsVersions[format]; ok {
|
return conf, nil
|
||||||
k += "_" + strconv.Itoa(v)
|
|
||||||
}
|
|
||||||
|
|
||||||
if mainImageVersionNumber > 0 {
|
|
||||||
k += "_" + strconv.Itoa(mainImageVersionNumber)
|
|
||||||
}
|
|
||||||
|
|
||||||
return k
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func newImageConfig(width, height, quality, rotate int, filter, anchor string) imageConfig {
|
func (i *imageResource) decodeSource() (image.Image, error) {
|
||||||
var c imageConfig
|
|
||||||
|
|
||||||
c.Width = width
|
|
||||||
c.Height = height
|
|
||||||
c.Quality = quality
|
|
||||||
c.Rotate = rotate
|
|
||||||
|
|
||||||
if filter != "" {
|
|
||||||
filter = strings.ToLower(filter)
|
|
||||||
if v, ok := imageFilters[filter]; ok {
|
|
||||||
c.Filter = v
|
|
||||||
c.FilterStr = filter
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if anchor != "" {
|
|
||||||
anchor = strings.ToLower(anchor)
|
|
||||||
if v, ok := anchorPositions[anchor]; ok {
|
|
||||||
c.Anchor = v
|
|
||||||
c.AnchorStr = anchor
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseImageConfig(config string) (imageConfig, error) {
|
|
||||||
var (
|
|
||||||
c imageConfig
|
|
||||||
err error
|
|
||||||
)
|
|
||||||
|
|
||||||
if config == "" {
|
|
||||||
return c, errors.New("image config cannot be empty")
|
|
||||||
}
|
|
||||||
|
|
||||||
parts := strings.Fields(config)
|
|
||||||
for _, part := range parts {
|
|
||||||
part = strings.ToLower(part)
|
|
||||||
|
|
||||||
if part == smartCropIdentifier {
|
|
||||||
c.AnchorStr = smartCropIdentifier
|
|
||||||
} else if pos, ok := anchorPositions[part]; ok {
|
|
||||||
c.Anchor = pos
|
|
||||||
c.AnchorStr = part
|
|
||||||
} else if filter, ok := imageFilters[part]; ok {
|
|
||||||
c.Filter = filter
|
|
||||||
c.FilterStr = part
|
|
||||||
} else if part[0] == 'q' {
|
|
||||||
c.Quality, err = strconv.Atoi(part[1:])
|
|
||||||
if err != nil {
|
|
||||||
return c, err
|
|
||||||
}
|
|
||||||
if c.Quality < 1 || c.Quality > 100 {
|
|
||||||
return c, errors.New("quality ranges from 1 to 100 inclusive")
|
|
||||||
}
|
|
||||||
} else if part[0] == 'r' {
|
|
||||||
c.Rotate, err = strconv.Atoi(part[1:])
|
|
||||||
if err != nil {
|
|
||||||
return c, err
|
|
||||||
}
|
|
||||||
} else if strings.Contains(part, "x") {
|
|
||||||
widthHeight := strings.Split(part, "x")
|
|
||||||
if len(widthHeight) <= 2 {
|
|
||||||
first := widthHeight[0]
|
|
||||||
if first != "" {
|
|
||||||
c.Width, err = strconv.Atoi(first)
|
|
||||||
if err != nil {
|
|
||||||
return c, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(widthHeight) == 2 {
|
|
||||||
second := widthHeight[1]
|
|
||||||
if second != "" {
|
|
||||||
c.Height, err = strconv.Atoi(second)
|
|
||||||
if err != nil {
|
|
||||||
return c, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return c, errors.New("invalid image dimensions")
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.Width == 0 && c.Height == 0 {
|
|
||||||
return c, errors.New("must provide Width or Height")
|
|
||||||
}
|
|
||||||
|
|
||||||
return c, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *Image) initConfig() error {
|
|
||||||
var err error
|
|
||||||
i.configInit.Do(func() {
|
|
||||||
if i.configLoaded {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
f hugio.ReadSeekCloser
|
|
||||||
config image.Config
|
|
||||||
)
|
|
||||||
|
|
||||||
f, err = i.ReadSeekCloser()
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
config, _, err = image.DecodeConfig(f)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
i.config = config
|
|
||||||
})
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return _errors.Wrap(err, "failed to load image config")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *Image) decodeSource() (image.Image, error) {
|
|
||||||
f, err := i.ReadSeekCloser()
|
f, err := i.ReadSeekCloser()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, _errors.Wrap(err, "failed to open image for decode")
|
return nil, _errors.Wrap(err, "failed to open image for decode")
|
||||||
|
@ -459,80 +190,39 @@ func (i *Image) decodeSource() (image.Image, error) {
|
||||||
return img, err
|
return img, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// returns an opened file or nil if nothing to write.
|
func (i *imageResource) clone(img image.Image) *imageResource {
|
||||||
func (i *Image) openDestinationsForWriting() (io.WriteCloser, error) {
|
spec := i.baseResource.Clone().(baseResource)
|
||||||
targetFilenames := i.targetFilenames()
|
|
||||||
var changedFilenames []string
|
|
||||||
|
|
||||||
// Fast path:
|
var image *images.Image
|
||||||
// This is a processed version of the original;
|
if img != nil {
|
||||||
// check if it already existis at the destination.
|
image = i.WithImage(img)
|
||||||
for _, targetFilename := range targetFilenames {
|
} else {
|
||||||
if _, err := i.spec.BaseFs.PublishFs.Stat(targetFilename); err == nil {
|
image = i.WithSpec(spec)
|
||||||
continue
|
|
||||||
}
|
|
||||||
changedFilenames = append(changedFilenames, targetFilename)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(changedFilenames) == 0 {
|
return &imageResource{
|
||||||
return nil, nil
|
Image: image,
|
||||||
}
|
baseResource: spec,
|
||||||
|
|
||||||
return helpers.OpenFilesForWriting(i.spec.BaseFs.PublishFs, changedFilenames...)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *Image) encodeTo(conf imageConfig, img image.Image, w io.Writer) error {
|
|
||||||
switch i.format {
|
|
||||||
case imaging.JPEG:
|
|
||||||
|
|
||||||
var rgba *image.RGBA
|
|
||||||
quality := conf.Quality
|
|
||||||
|
|
||||||
if nrgba, ok := img.(*image.NRGBA); ok {
|
|
||||||
if nrgba.Opaque() {
|
|
||||||
rgba = &image.RGBA{
|
|
||||||
Pix: nrgba.Pix,
|
|
||||||
Stride: nrgba.Stride,
|
|
||||||
Rect: nrgba.Rect,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if rgba != nil {
|
|
||||||
return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality})
|
|
||||||
}
|
|
||||||
return jpeg.Encode(w, img, &jpeg.Options{Quality: quality})
|
|
||||||
default:
|
|
||||||
return imaging.Encode(w, img, i.format)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *Image) clone() *Image {
|
func (i *imageResource) setBasePath(conf images.ImageConfig) {
|
||||||
g := *i.genericResource
|
i.getResourcePaths().relTargetDirFile = i.relTargetPathFromConfig(conf)
|
||||||
g.resourceContent = &resourceContent{}
|
}
|
||||||
if g.publishOnce != nil {
|
|
||||||
g.publishOnce = &publishOnce{logger: g.publishOnce.logger}
|
func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile {
|
||||||
|
p1, p2 := helpers.FileAndExt(i.getResourcePaths().relTargetDirFile.file)
|
||||||
|
if conf.Action == "trace" {
|
||||||
|
p2 = ".svg"
|
||||||
}
|
}
|
||||||
|
|
||||||
return &Image{
|
h, _ := i.hash()
|
||||||
imaging: i.imaging,
|
idStr := fmt.Sprintf("_hu%s_%d", h, i.size())
|
||||||
format: i.format,
|
|
||||||
genericResource: &g}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *Image) setBasePath(conf imageConfig) {
|
|
||||||
i.relTargetDirFile = i.relTargetPathFromConfig(conf)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *Image) relTargetPathFromConfig(conf imageConfig) dirFile {
|
|
||||||
p1, p2 := helpers.FileAndExt(i.relTargetDirFile.file)
|
|
||||||
|
|
||||||
idStr := fmt.Sprintf("_hu%s_%d", i.hash, i.osFileInfo.Size())
|
|
||||||
|
|
||||||
// Do not change for no good reason.
|
// Do not change for no good reason.
|
||||||
const md5Threshold = 100
|
const md5Threshold = 100
|
||||||
|
|
||||||
key := conf.key(i.format)
|
key := conf.Key(i.Format)
|
||||||
|
|
||||||
// It is useful to have the key in clear text, but when nesting transforms, it
|
// It is useful to have the key in clear text, but when nesting transforms, it
|
||||||
// can easily be too long to read, and maybe even too long
|
// can easily be too long to read, and maybe even too long
|
||||||
|
@ -554,43 +244,7 @@ func (i *Image) relTargetPathFromConfig(conf imageConfig) dirFile {
|
||||||
}
|
}
|
||||||
|
|
||||||
return dirFile{
|
return dirFile{
|
||||||
dir: i.relTargetDirFile.dir,
|
dir: i.getResourcePaths().relTargetDirFile.dir,
|
||||||
file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2),
|
file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2),
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func decodeImaging(m map[string]interface{}) (Imaging, error) {
|
|
||||||
var i Imaging
|
|
||||||
if err := mapstructure.WeakDecode(m, &i); err != nil {
|
|
||||||
return i, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if i.Quality == 0 {
|
|
||||||
i.Quality = defaultJPEGQuality
|
|
||||||
} else if i.Quality < 0 || i.Quality > 100 {
|
|
||||||
return i, errors.New("JPEG quality must be a number between 1 and 100")
|
|
||||||
}
|
|
||||||
|
|
||||||
if i.Anchor == "" || strings.EqualFold(i.Anchor, smartCropIdentifier) {
|
|
||||||
i.Anchor = smartCropIdentifier
|
|
||||||
} else {
|
|
||||||
i.Anchor = strings.ToLower(i.Anchor)
|
|
||||||
if _, found := anchorPositions[i.Anchor]; !found {
|
|
||||||
return i, errors.New("invalid anchor value in imaging config")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if i.ResampleFilter == "" {
|
|
||||||
i.ResampleFilter = defaultResampleFilter
|
|
||||||
} else {
|
|
||||||
filter := strings.ToLower(i.ResampleFilter)
|
|
||||||
_, found := imageFilters[filter]
|
|
||||||
if !found {
|
|
||||||
return i, fmt.Errorf("%q is not a valid resample filter", filter)
|
|
||||||
}
|
|
||||||
i.ResampleFilter = filter
|
|
||||||
}
|
|
||||||
|
|
||||||
return i, nil
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -20,7 +20,7 @@ import (
 	"strings"
 	"sync"

-	"github.com/gohugoio/hugo/common/hugio"
+	"github.com/gohugoio/hugo/resources/images"

 	"github.com/gohugoio/hugo/cache/filecache"
 	"github.com/gohugoio/hugo/helpers"
@@ -32,7 +32,7 @@ type imageCache struct {
 	fileCache *filecache.Cache

 	mu    sync.RWMutex
-	store map[string]*Image
+	store map[string]*resourceAdapter
 }

 func (c *imageCache) isInCache(key string) bool {
@@ -66,33 +66,34 @@ func (c *imageCache) normalizeKey(key string) string {
 func (c *imageCache) clear() {
 	c.mu.Lock()
 	defer c.mu.Unlock()
-	c.store = make(map[string]*Image)
+	c.store = make(map[string]*resourceAdapter)
 }

 func (c *imageCache) getOrCreate(
-	parent *Image, conf imageConfig, createImage func() (*Image, image.Image, error)) (*Image, error) {
+	parent *imageResource, conf images.ImageConfig,
+	createImage func() (*imageResource, image.Image, error)) (*resourceAdapter, error) {
 	relTarget := parent.relTargetPathFromConfig(conf)
 	key := parent.relTargetPathForRel(relTarget.path(), false, false, false)

 	// First check the in-memory store, then the disk.
 	c.mu.RLock()
-	img, found := c.store[key]
+	cachedImage, found := c.store[key]
 	c.mu.RUnlock()

 	if found {
-		return img, nil
+		return cachedImage, nil
 	}

+	var img *imageResource
+
 	// These funcs are protected by a named lock.
 	// read clones the parent to its new name and copies
 	// the content to the destinations.
 	read := func(info filecache.ItemInfo, r io.Reader) error {
-		img = parent.clone()
-		img.relTargetDirFile.file = relTarget.file
-		img.sourceFilename = info.Name
-		// Make sure it's always loaded by sourceFilename.
-		img.openReadSeekerCloser = nil
+		img = parent.clone(nil)
+		rp := img.getResourcePaths()
+		rp.relTargetDirFile.file = relTarget.file
+		img.setSourceFilename(info.Name)

 		w, err := img.openDestinationsForWriting()
 		if err != nil {
@@ -109,29 +110,20 @@ func (c *imageCache) getOrCreate(
 			return err
 		}

-	// create creates the image and encodes it to w (cache) and to its destinations.
+	// create creates the image and encodes it to the cache (w).
 	create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) {
+		defer w.Close()

 		var conv image.Image
 		img, conv, err = createImage()
 		if err != nil {
-			w.Close()
 			return
 		}
-		img.relTargetDirFile.file = relTarget.file
-		img.sourceFilename = info.Name
+		rp := img.getResourcePaths()
+		rp.relTargetDirFile.file = relTarget.file
+		img.setSourceFilename(info.Name)

-		destinations, err := img.openDestinationsForWriting()
-		if err != nil {
-			w.Close()
-			return err
-		}
-
-		if destinations != nil {
-			w = hugio.NewMultiWriteCloser(w, destinations)
-		}
-		defer w.Close()
-
-		return img.encodeTo(conf, conv, w)
+		return img.EncodeTo(conf, conv, w)
 	}

 	// Now look in the file cache.
@@ -147,20 +139,21 @@ func (c *imageCache) getOrCreate(
 	}

 	// The file is now stored in this cache.
-	img.sourceFs = c.fileCache.Fs
+	img.setSourceFs(c.fileCache.Fs)

 	c.mu.Lock()
-	if img2, found := c.store[key]; found {
+	if cachedImage, found = c.store[key]; found {
 		c.mu.Unlock()
-		return img2, nil
+		return cachedImage, nil
 	}
-	c.store[key] = img
+	imgAdapter := newResourceAdapter(parent.getSpec(), true, img)
+	c.store[key] = imgAdapter
 	c.mu.Unlock()

-	return img, nil
+	return imgAdapter, nil

 }

 func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *imageCache {
-	return &imageCache{fileCache: fileCache, pathSpec: ps, store: make(map[string]*Image)}
+	return &imageCache{fileCache: fileCache, pathSpec: ps, store: make(map[string]*resourceAdapter)}
 }
@ -18,121 +18,101 @@ import (
|
||||||
"math/rand"
|
"math/rand"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
"sync"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/gohugoio/hugo/media"
|
||||||
|
"github.com/gohugoio/hugo/resources/resource"
|
||||||
|
|
||||||
|
"github.com/google/go-cmp/cmp"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/htesting/hqt"
|
"github.com/gohugoio/hugo/htesting/hqt"
|
||||||
|
|
||||||
"github.com/disintegration/imaging"
|
|
||||||
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
qt "github.com/frankban/quicktest"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParseImageConfig(t *testing.T) {
|
var eq = qt.CmpEquals(
|
||||||
for i, this := range []struct {
|
cmp.Comparer(func(p1, p2 *resourceAdapter) bool {
|
||||||
in string
|
return p1.resourceAdapterInner == p2.resourceAdapterInner
|
||||||
expect interface{}
|
}),
|
||||||
}{
|
cmp.Comparer(func(p1, p2 *genericResource) bool { return p1 == p2 }),
|
||||||
{"300x400", newImageConfig(300, 400, 0, 0, "", "")},
|
cmp.Comparer(func(m1, m2 media.Type) bool {
|
||||||
{"100x200 bottomRight", newImageConfig(100, 200, 0, 0, "", "BottomRight")},
|
return m1.Type() == m2.Type()
|
||||||
{"10x20 topleft Lanczos", newImageConfig(10, 20, 0, 0, "Lanczos", "topleft")},
|
}),
|
||||||
{"linear left 10x r180", newImageConfig(10, 0, 0, 180, "linear", "left")},
|
)
|
||||||
{"x20 riGht Cosine q95", newImageConfig(0, 20, 95, 0, "cosine", "right")},
|
|
||||||
|
|
||||||
{"", false},
|
|
||||||
{"foo", false},
|
|
||||||
} {
|
|
||||||
result, err := parseImageConfig(this.in)
|
|
||||||
if b, ok := this.expect.(bool); ok && !b {
|
|
||||||
if err == nil {
|
|
||||||
t.Errorf("[%d] parseImageConfig didn't return an expected error", i)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("[%d] err: %s", i, err)
|
|
||||||
}
|
|
||||||
if fmt.Sprint(result) != fmt.Sprint(this.expect) {
|
|
||||||
t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, this.expect)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestImageTransformBasic(t *testing.T) {
|
func TestImageTransformBasic(t *testing.T) {
|
||||||
|
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
|
|
||||||
image := fetchSunset(c)
|
image := fetchSunset(c)
|
||||||
fileCache := image.spec.FileCaches.ImageCache().Fs
|
|
||||||
|
fileCache := image.(specProvider).getSpec().FileCaches.ImageCache().Fs
|
||||||
|
|
||||||
|
assertWidthHeight := func(img resource.Image, w, h int) {
|
||||||
|
c.Helper()
|
||||||
|
c.Assert(img, qt.Not(qt.IsNil))
|
||||||
|
c.Assert(img.Width(), qt.Equals, w)
|
||||||
|
c.Assert(img.Height(), qt.Equals, h)
|
||||||
|
}
|
||||||
|
|
||||||
c.Assert(image.RelPermalink(), qt.Equals, "/a/sunset.jpg")
|
c.Assert(image.RelPermalink(), qt.Equals, "/a/sunset.jpg")
|
||||||
c.Assert(image.ResourceType(), qt.Equals, "image")
|
c.Assert(image.ResourceType(), qt.Equals, "image")
|
||||||
|
assertWidthHeight(image, 900, 562)
|
||||||
|
|
||||||
resized, err := image.Resize("300x200")
|
resized, err := image.Resize("300x200")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(image != resized, qt.Equals, true)
|
c.Assert(image != resized, qt.Equals, true)
|
||||||
c.Assert(image.genericResource != resized.genericResource, qt.Equals, true)
|
c.Assert(image, qt.Not(eq), resized)
|
||||||
c.Assert(image.sourceFilename != resized.sourceFilename, qt.Equals, true)
|
assertWidthHeight(resized, 300, 200)
|
||||||
|
assertWidthHeight(image, 900, 562)
|
||||||
|
|
||||||
resized0x, err := image.Resize("x200")
|
resized0x, err := image.Resize("x200")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resized0x.Width(), qt.Equals, 320)
|
assertWidthHeight(resized0x, 320, 200)
|
||||||
c.Assert(resized0x.Height(), qt.Equals, 200)
|
|
||||||
|
|
||||||
assertFileCache(c, fileCache, resized0x.RelPermalink(), 320, 200)
|
assertFileCache(c, fileCache, resized0x.RelPermalink(), 320, 200)
|
||||||
|
|
||||||
resizedx0, err := image.Resize("200x")
|
resizedx0, err := image.Resize("200x")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resizedx0.Width(), qt.Equals, 200)
|
assertWidthHeight(resizedx0, 200, 125)
|
||||||
c.Assert(resizedx0.Height(), qt.Equals, 125)
|
|
||||||
assertFileCache(c, fileCache, resizedx0.RelPermalink(), 200, 125)
|
assertFileCache(c, fileCache, resizedx0.RelPermalink(), 200, 125)
|
||||||
|
|
||||||
resizedAndRotated, err := image.Resize("x200 r90")
|
resizedAndRotated, err := image.Resize("x200 r90")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resizedAndRotated.Width(), qt.Equals, 125)
|
assertWidthHeight(resizedAndRotated, 125, 200)
|
||||||
c.Assert(resizedAndRotated.Height(), qt.Equals, 200)
|
|
||||||
assertFileCache(c, fileCache, resizedAndRotated.RelPermalink(), 125, 200)
|
assertFileCache(c, fileCache, resizedAndRotated.RelPermalink(), 125, 200)
|
||||||
|
|
||||||
|
assertWidthHeight(resized, 300, 200)
|
||||||
c.Assert(resized.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_resize_q68_linear.jpg")
|
c.Assert(resized.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_resize_q68_linear.jpg")
|
||||||
c.Assert(resized.Width(), qt.Equals, 300)
|
|
||||||
c.Assert(resized.Height(), qt.Equals, 200)
|
|
||||||
|
|
||||||
fitted, err := resized.Fit("50x50")
|
fitted, err := resized.Fit("50x50")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(fitted.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_625708021e2bb281c9f1002f88e4753f.jpg")
|
c.Assert(fitted.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_625708021e2bb281c9f1002f88e4753f.jpg")
|
||||||
c.Assert(fitted.Width(), qt.Equals, 50)
|
assertWidthHeight(fitted, 50, 33)
|
||||||
c.Assert(fitted.Height(), qt.Equals, 33)
|
|
||||||
|
|
||||||
// Check the MD5 key threshold
|
// Check the MD5 key threshold
|
||||||
fittedAgain, _ := fitted.Fit("10x20")
|
fittedAgain, _ := fitted.Fit("10x20")
|
||||||
fittedAgain, err = fittedAgain.Fit("10x20")
|
fittedAgain, err = fittedAgain.Fit("10x20")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(fittedAgain.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f65ba24dc2b7fba0f56d7f104519157.jpg")
|
c.Assert(fittedAgain.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f65ba24dc2b7fba0f56d7f104519157.jpg")
|
||||||
c.Assert(fittedAgain.Width(), qt.Equals, 10)
|
assertWidthHeight(fittedAgain, 10, 6)
|
||||||
c.Assert(fittedAgain.Height(), qt.Equals, 6)
|
|
||||||
|
|
||||||
filled, err := image.Fill("200x100 bottomLeft")
|
filled, err := image.Fill("200x100 bottomLeft")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(filled.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_bottomleft.jpg")
|
c.Assert(filled.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_bottomleft.jpg")
|
||||||
c.Assert(filled.Width(), qt.Equals, 200)
|
assertWidthHeight(filled, 200, 100)
|
||||||
c.Assert(filled.Height(), qt.Equals, 100)
|
|
||||||
assertFileCache(c, fileCache, filled.RelPermalink(), 200, 100)
|
assertFileCache(c, fileCache, filled.RelPermalink(), 200, 100)
|
||||||
|
|
||||||
smart, err := image.Fill("200x100 smart")
|
smart, err := image.Fill("200x100 smart")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(smart.RelPermalink(), qt.Equals, fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_smart%d.jpg", smartCropVersionNumber))
|
c.Assert(smart.RelPermalink(), qt.Equals, fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_smart%d.jpg", 1))
|
||||||
c.Assert(smart.Width(), qt.Equals, 200)
|
assertWidthHeight(smart, 200, 100)
|
||||||
c.Assert(smart.Height(), qt.Equals, 100)
|
|
||||||
assertFileCache(c, fileCache, smart.RelPermalink(), 200, 100)
|
assertFileCache(c, fileCache, smart.RelPermalink(), 200, 100)
|
||||||
|
|
||||||
// Check cache
|
// Check cache
|
||||||
filledAgain, err := image.Fill("200x100 bottomLeft")
|
filledAgain, err := image.Fill("200x100 bottomLeft")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(filled == filledAgain, qt.Equals, true)
|
c.Assert(filled, eq, filledAgain)
|
||||||
c.Assert(filled.sourceFilename == filledAgain.sourceFilename, qt.Equals, true)
|
|
||||||
assertFileCache(c, fileCache, filledAgain.RelPermalink(), 200, 100)
|
assertFileCache(c, fileCache, filledAgain.RelPermalink(), 200, 100)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// https://github.com/gohugoio/hugo/issues/4261
|
// https://github.com/gohugoio/hugo/issues/4261
|
||||||
|
@ -158,6 +138,7 @@ func TestImageTransformLongFilename(t *testing.T) {
|
||||||
func TestImageTransformUppercaseExt(t *testing.T) {
|
func TestImageTransformUppercaseExt(t *testing.T) {
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
image := fetchImage(c, "sunrise.JPG")
|
image := fetchImage(c, "sunrise.JPG")
|
||||||
|
|
||||||
resized, err := image.Resize("200x")
|
resized, err := image.Resize("200x")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resized, qt.Not(qt.IsNil))
|
c.Assert(resized, qt.Not(qt.IsNil))
|
||||||
|
@ -173,17 +154,16 @@ func TestImagePermalinkPublishOrder(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Run(name, func(t *testing.T) {
|
t.Run(name, func(t *testing.T) {
|
||||||
|
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
spec := newTestResourceOsFs(c)
|
spec := newTestResourceOsFs(c)
|
||||||
|
|
||||||
check1 := func(img *Image) {
|
check1 := func(img resource.Image) {
|
||||||
resizedLink := "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x50_resize_q75_box.jpg"
|
resizedLink := "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x50_resize_q75_box.jpg"
|
||||||
c.Assert(img.RelPermalink(), qt.Equals, resizedLink)
|
c.Assert(img.RelPermalink(), qt.Equals, resizedLink)
|
||||||
assertImageFile(c, spec.PublishFs, resizedLink, 100, 50)
|
assertImageFile(c, spec.PublishFs, resizedLink, 100, 50)
|
||||||
}
|
}
|
||||||
|
|
||||||
check2 := func(img *Image) {
|
check2 := func(img resource.Image) {
|
||||||
c.Assert(img.RelPermalink(), qt.Equals, "/a/sunset.jpg")
|
c.Assert(img.RelPermalink(), qt.Equals, "/a/sunset.jpg")
|
||||||
assertImageFile(c, spec.PublishFs, "a/sunset.jpg", 900, 562)
|
assertImageFile(c, spec.PublishFs, "a/sunset.jpg", 900, 562)
|
||||||
}
|
}
|
||||||
|
@ -198,18 +178,16 @@ func TestImagePermalinkPublishOrder(t *testing.T) {
|
||||||
resized, err := orignal.Resize("100x50")
|
resized, err := orignal.Resize("100x50")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
check1(resized)
|
check1(resized.(resource.Image))
|
||||||
|
|
||||||
if !checkOriginalFirst {
|
if !checkOriginalFirst {
|
||||||
check2(orignal)
|
check2(orignal)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestImageTransformConcurrent(t *testing.T) {
|
func TestImageTransformConcurrent(t *testing.T) {
|
||||||
|
|
||||||
var wg sync.WaitGroup
|
var wg sync.WaitGroup
|
||||||
|
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
|
@ -239,12 +217,7 @@ func TestImageTransformConcurrent(t *testing.T) {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = r2.decodeSource()
|
img = r2
|
||||||
if err != nil {
|
|
||||||
t.Error("Err decode:", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
img = r1
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}(i + 20)
|
}(i + 20)
|
||||||
|
@ -253,58 +226,12 @@ func TestImageTransformConcurrent(t *testing.T) {
|
||||||
wg.Wait()
|
wg.Wait()
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDecodeImaging(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
m := map[string]interface{}{
|
|
||||||
"quality": 42,
|
|
||||||
"resampleFilter": "NearestNeighbor",
|
|
||||||
"anchor": "topLeft",
|
|
||||||
}
|
|
||||||
|
|
||||||
imaging, err := decodeImaging(m)
|
|
||||||
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(imaging.Quality, qt.Equals, 42)
|
|
||||||
c.Assert(imaging.ResampleFilter, qt.Equals, "nearestneighbor")
|
|
||||||
c.Assert(imaging.Anchor, qt.Equals, "topleft")
|
|
||||||
|
|
||||||
m = map[string]interface{}{}
|
|
||||||
|
|
||||||
imaging, err = decodeImaging(m)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(imaging.Quality, qt.Equals, defaultJPEGQuality)
|
|
||||||
c.Assert(imaging.ResampleFilter, qt.Equals, "box")
|
|
||||||
c.Assert(imaging.Anchor, qt.Equals, "smart")
|
|
||||||
|
|
||||||
_, err = decodeImaging(map[string]interface{}{
|
|
||||||
"quality": 123,
|
|
||||||
})
|
|
||||||
c.Assert(err, qt.Not(qt.IsNil))
|
|
||||||
|
|
||||||
_, err = decodeImaging(map[string]interface{}{
|
|
||||||
"resampleFilter": "asdf",
|
|
||||||
})
|
|
||||||
c.Assert(err, qt.Not(qt.IsNil))
|
|
||||||
|
|
||||||
_, err = decodeImaging(map[string]interface{}{
|
|
||||||
"anchor": "asdf",
|
|
||||||
})
|
|
||||||
c.Assert(err, qt.Not(qt.IsNil))
|
|
||||||
|
|
||||||
imaging, err = decodeImaging(map[string]interface{}{
|
|
||||||
"anchor": "Smart",
|
|
||||||
})
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(imaging.Anchor, qt.Equals, "smart")
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestImageWithMetadata(t *testing.T) {
|
func TestImageWithMetadata(t *testing.T) {
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
|
|
||||||
image := fetchSunset(c)
|
image := fetchSunset(c)
|
||||||
|
|
||||||
var meta = []map[string]interface{}{
|
meta := []map[string]interface{}{
|
||||||
{
|
{
|
||||||
"title": "My Sunset",
|
"title": "My Sunset",
|
||||||
"name": "Sunset #:counter",
|
"name": "Sunset #:counter",
|
||||||
|
@ -318,71 +245,69 @@ func TestImageWithMetadata(t *testing.T) {
|
||||||
resized, err := image.Resize("200x")
|
resized, err := image.Resize("200x")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resized.Name(), qt.Equals, "Sunset #1")
|
c.Assert(resized.Name(), qt.Equals, "Sunset #1")
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestImageResize8BitPNG(t *testing.T) {
|
func TestImageResize8BitPNG(t *testing.T) {
|
||||||
|
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
|
|
||||||
image := fetchImage(c, "gohugoio.png")
|
image := fetchImage(c, "gohugoio.png")
|
||||||
|
|
||||||
c.Assert(image.format, qt.Equals, imaging.PNG)
|
c.Assert(image.MediaType().Type(), qt.Equals, "image/png")
|
||||||
c.Assert(image.RelPermalink(), qt.Equals, "/a/gohugoio.png")
|
c.Assert(image.RelPermalink(), qt.Equals, "/a/gohugoio.png")
|
||||||
c.Assert(image.ResourceType(), qt.Equals, "image")
|
c.Assert(image.ResourceType(), qt.Equals, "image")
|
||||||
|
|
||||||
resized, err := image.Resize("800x")
|
resized, err := image.Resize("800x")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resized.format, qt.Equals, imaging.PNG)
|
c.Assert(resized.MediaType().Type(), qt.Equals, "image/png")
|
||||||
c.Assert(resized.RelPermalink(), qt.Equals, "/a/gohugoio_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_800x0_resize_linear_2.png")
|
c.Assert(resized.RelPermalink(), qt.Equals, "/a/gohugoio_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_800x0_resize_linear_2.png")
|
||||||
c.Assert(resized.Width(), qt.Equals, 800)
|
c.Assert(resized.Width(), qt.Equals, 800)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestImageResizeInSubPath(t *testing.T) {
|
func TestImageResizeInSubPath(t *testing.T) {
|
||||||
|
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
|
|
||||||
image := fetchImage(c, "sub/gohugoio2.png")
|
image := fetchImage(c, "sub/gohugoio2.png")
|
||||||
fileCache := image.spec.FileCaches.ImageCache().Fs
|
fileCache := image.(specProvider).getSpec().FileCaches.ImageCache().Fs
|
||||||
|
|
||||||
c.Assert(image.format, qt.Equals, imaging.PNG)
|
c.Assert(image.MediaType(), eq, media.PNGType)
|
||||||
c.Assert(image.RelPermalink(), qt.Equals, "/a/sub/gohugoio2.png")
|
c.Assert(image.RelPermalink(), qt.Equals, "/a/sub/gohugoio2.png")
|
||||||
c.Assert(image.ResourceType(), qt.Equals, "image")
|
c.Assert(image.ResourceType(), qt.Equals, "image")
|
||||||
|
|
||||||
resized, err := image.Resize("101x101")
|
resized, err := image.Resize("101x101")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resized.format, qt.Equals, imaging.PNG)
|
c.Assert(resized.MediaType().Type(), qt.Equals, "image/png")
|
||||||
c.Assert(resized.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png")
|
c.Assert(resized.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png")
|
||||||
c.Assert(resized.Width(), qt.Equals, 101)
|
c.Assert(resized.Width(), qt.Equals, 101)
|
||||||
|
|
||||||
assertFileCache(c, fileCache, resized.RelPermalink(), 101, 101)
|
assertFileCache(c, fileCache, resized.RelPermalink(), 101, 101)
|
||||||
publishedImageFilename := filepath.Clean(resized.RelPermalink())
|
publishedImageFilename := filepath.Clean(resized.RelPermalink())
|
||||||
assertImageFile(c, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
|
|
||||||
c.Assert(image.spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil)
|
spec := image.(specProvider).getSpec()
|
||||||
|
|
||||||
|
assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
|
||||||
|
c.Assert(spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil)
|
||||||
|
|
||||||
// Cleare mem cache to simulate reading from the file cache.
|
// Cleare mem cache to simulate reading from the file cache.
|
||||||
resized.spec.imageCache.clear()
|
spec.imageCache.clear()
|
||||||
|
|
||||||
resizedAgain, err := image.Resize("101x101")
|
resizedAgain, err := image.Resize("101x101")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(resizedAgain.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png")
|
c.Assert(resizedAgain.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png")
|
||||||
c.Assert(resizedAgain.Width(), qt.Equals, 101)
|
c.Assert(resizedAgain.Width(), qt.Equals, 101)
|
||||||
assertFileCache(c, fileCache, resizedAgain.RelPermalink(), 101, 101)
|
assertFileCache(c, fileCache, resizedAgain.RelPermalink(), 101, 101)
|
||||||
assertImageFile(c, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
|
assertImageFile(c, image.(specProvider).getSpec().BaseFs.PublishFs, publishedImageFilename, 101, 101)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSVGImage(t *testing.T) {
	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
	svg := fetchResourceForSpec(spec, c, "circle.svg")
	c.Assert(svg, qt.Not(qt.IsNil))
}

func TestSVGImageContent(t *testing.T) {
	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
	svg := fetchResourceForSpec(spec, c, "circle.svg")
	c.Assert(svg, qt.Not(qt.IsNil))
276 resources/images/config.go Normal file
@ -0,0 +1,276 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package images

import (
	"errors"
	"fmt"
	"strconv"
	"strings"

	"github.com/disintegration/imaging"
	"github.com/mitchellh/mapstructure"
)

const (
	defaultJPEGQuality    = 75
	defaultResampleFilter = "box"
)

var (
	imageFormats = map[string]imaging.Format{
		".jpg":  imaging.JPEG,
		".jpeg": imaging.JPEG,
		".png":  imaging.PNG,
		".tif":  imaging.TIFF,
		".tiff": imaging.TIFF,
		".bmp":  imaging.BMP,
		".gif":  imaging.GIF,
	}

	// Add or increment if changes to an image format's processing requires
	// re-generation.
	imageFormatsVersions = map[imaging.Format]int{
		imaging.PNG: 2, // Floyd Steinberg dithering
	}

	// Increment to mark all processed images as stale. Only use when absolutely needed.
	// See the finer grained smartCropVersionNumber and imageFormatsVersions.
	mainImageVersionNumber = 0

	// Increment to mark all traced SVGs as stale.
	traceVersionNumber = 0
)

var anchorPositions = map[string]imaging.Anchor{
	strings.ToLower("Center"):      imaging.Center,
	strings.ToLower("TopLeft"):     imaging.TopLeft,
	strings.ToLower("Top"):         imaging.Top,
	strings.ToLower("TopRight"):    imaging.TopRight,
	strings.ToLower("Left"):        imaging.Left,
	strings.ToLower("Right"):       imaging.Right,
	strings.ToLower("BottomLeft"):  imaging.BottomLeft,
	strings.ToLower("Bottom"):      imaging.Bottom,
	strings.ToLower("BottomRight"): imaging.BottomRight,
}

var imageFilters = map[string]imaging.ResampleFilter{
	strings.ToLower("NearestNeighbor"):   imaging.NearestNeighbor,
	strings.ToLower("Box"):               imaging.Box,
	strings.ToLower("Linear"):            imaging.Linear,
	strings.ToLower("Hermite"):           imaging.Hermite,
	strings.ToLower("MitchellNetravali"): imaging.MitchellNetravali,
	strings.ToLower("CatmullRom"):        imaging.CatmullRom,
	strings.ToLower("BSpline"):           imaging.BSpline,
	strings.ToLower("Gaussian"):          imaging.Gaussian,
	strings.ToLower("Lanczos"):           imaging.Lanczos,
	strings.ToLower("Hann"):              imaging.Hann,
	strings.ToLower("Hamming"):           imaging.Hamming,
	strings.ToLower("Blackman"):          imaging.Blackman,
	strings.ToLower("Bartlett"):          imaging.Bartlett,
	strings.ToLower("Welch"):             imaging.Welch,
	strings.ToLower("Cosine"):            imaging.Cosine,
}

func ImageFormatFromExt(ext string) (imaging.Format, bool) {
	f, found := imageFormats[ext]
	return f, found
}

func DecodeConfig(m map[string]interface{}) (Imaging, error) {
	var i Imaging
	if err := mapstructure.WeakDecode(m, &i); err != nil {
		return i, err
	}

	if i.Quality == 0 {
		i.Quality = defaultJPEGQuality
	} else if i.Quality < 0 || i.Quality > 100 {
		return i, errors.New("JPEG quality must be a number between 1 and 100")
	}

	if i.Anchor == "" || strings.EqualFold(i.Anchor, SmartCropIdentifier) {
		i.Anchor = SmartCropIdentifier
	} else {
		i.Anchor = strings.ToLower(i.Anchor)
		if _, found := anchorPositions[i.Anchor]; !found {
			return i, errors.New("invalid anchor value in imaging config")
		}
	}

	if i.ResampleFilter == "" {
		i.ResampleFilter = defaultResampleFilter
	} else {
		filter := strings.ToLower(i.ResampleFilter)
		_, found := imageFilters[filter]
		if !found {
			return i, fmt.Errorf("%q is not a valid resample filter", filter)
		}
		i.ResampleFilter = filter
	}

	return i, nil
}

func DecodeImageConfig(action, config string, defaults Imaging) (ImageConfig, error) {
	var (
		c   ImageConfig
		err error
	)

	c.Action = action

	if config == "" {
		return c, errors.New("image config cannot be empty")
	}

	parts := strings.Fields(config)
	for _, part := range parts {
		part = strings.ToLower(part)

		if part == SmartCropIdentifier {
			c.AnchorStr = SmartCropIdentifier
		} else if pos, ok := anchorPositions[part]; ok {
			c.Anchor = pos
			c.AnchorStr = part
		} else if filter, ok := imageFilters[part]; ok {
			c.Filter = filter
			c.FilterStr = part
		} else if part[0] == 'q' {
			c.Quality, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
			if c.Quality < 1 || c.Quality > 100 {
				return c, errors.New("quality ranges from 1 to 100 inclusive")
			}
		} else if part[0] == 'r' {
			c.Rotate, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
		} else if strings.Contains(part, "x") {
			widthHeight := strings.Split(part, "x")
			if len(widthHeight) <= 2 {
				first := widthHeight[0]
				if first != "" {
					c.Width, err = strconv.Atoi(first)
					if err != nil {
						return c, err
					}
				}

				if len(widthHeight) == 2 {
					second := widthHeight[1]
					if second != "" {
						c.Height, err = strconv.Atoi(second)
						if err != nil {
							return c, err
						}
					}
				}
			} else {
				return c, errors.New("invalid image dimensions")
			}

		}
	}

	if c.Width == 0 && c.Height == 0 {
		return c, errors.New("must provide Width or Height")
	}

	if c.FilterStr == "" {
		c.FilterStr = defaults.ResampleFilter
		c.Filter = imageFilters[c.FilterStr]
	}

	if c.AnchorStr == "" {
		c.AnchorStr = defaults.Anchor
		if !strings.EqualFold(c.AnchorStr, SmartCropIdentifier) {
			c.Anchor = anchorPositions[c.AnchorStr]
		}
	}

	return c, nil
}

// ImageConfig holds configuration to create a new image from an existing one, resize etc.
type ImageConfig struct {
	Action string

	// Quality ranges from 1 to 100 inclusive, higher is better.
	// This is only relevant for JPEG images.
	// Default is 75.
	Quality int

	// Rotate rotates an image by the given angle counter-clockwise.
	// The rotation will be performed first.
	Rotate int

	Width  int
	Height int

	Filter    imaging.ResampleFilter
	FilterStr string

	Anchor    imaging.Anchor
	AnchorStr string
}

func (i ImageConfig) Key(format imaging.Format) string {
	k := strconv.Itoa(i.Width) + "x" + strconv.Itoa(i.Height)
	if i.Action != "" {
		k += "_" + i.Action
	}
	if i.Quality > 0 {
		k += "_q" + strconv.Itoa(i.Quality)
	}
	if i.Rotate != 0 {
		k += "_r" + strconv.Itoa(i.Rotate)
	}
	anchor := i.AnchorStr
	if anchor == SmartCropIdentifier {
		anchor = anchor + strconv.Itoa(smartCropVersionNumber)
	}

	k += "_" + i.FilterStr

	if strings.EqualFold(i.Action, "fill") {
		k += "_" + anchor
	}

	if v, ok := imageFormatsVersions[format]; ok {
		k += "_" + strconv.Itoa(v)
	}

	if mainImageVersionNumber > 0 {
		k += "_" + strconv.Itoa(mainImageVersionNumber)
	}

	return k
}

// Imaging contains default image processing configuration. This will be fetched
// from site (or language) config.
type Imaging struct {
	// Default image quality setting (1-100). Only used for JPEG images.
	Quality int

	// Resample filter used. See https://github.com/disintegration/imaging
	ResampleFilter string

	// The anchor used in Fill. Default is "smart", i.e. Smart Crop.
	Anchor string
}
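As a quick illustration of the spec parsing above, here is a minimal, hypothetical sketch (not part of this commit) that feeds a processing spec string into the new images.DecodeImageConfig and prints the decoded fields; it assumes the resources/images package is importable at the path shown:

package main

import (
	"fmt"
	"log"

	"github.com/gohugoio/hugo/resources/images"
)

func main() {
	// Site-level defaults, as they would come from DecodeConfig.
	defaults := images.Imaging{ResampleFilter: "box", Anchor: "smart"}

	// "fill" action, 300x400 target, Smart Crop anchor, JPEG quality 80.
	conf, err := images.DecodeImageConfig("fill", "300x400 smart q80", defaults)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(conf.Width, conf.Height, conf.Quality, conf.AnchorStr, conf.FilterStr)
	// Per the parsing rules above this should print: 300 400 80 smart box
}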
125 resources/images/config_test.go Normal file
@ -0,0 +1,125 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package images

import (
	"fmt"
	"strings"
	"testing"

	qt "github.com/frankban/quicktest"
)

func TestDecodeConfig(t *testing.T) {
	c := qt.New(t)
	m := map[string]interface{}{
		"quality":        42,
		"resampleFilter": "NearestNeighbor",
		"anchor":         "topLeft",
	}

	imaging, err := DecodeConfig(m)

	c.Assert(err, qt.IsNil)
	c.Assert(imaging.Quality, qt.Equals, 42)
	c.Assert(imaging.ResampleFilter, qt.Equals, "nearestneighbor")
	c.Assert(imaging.Anchor, qt.Equals, "topleft")

	m = map[string]interface{}{}

	imaging, err = DecodeConfig(m)
	c.Assert(err, qt.IsNil)
	c.Assert(imaging.Quality, qt.Equals, defaultJPEGQuality)
	c.Assert(imaging.ResampleFilter, qt.Equals, "box")
	c.Assert(imaging.Anchor, qt.Equals, "smart")

	_, err = DecodeConfig(map[string]interface{}{
		"quality": 123,
	})
	c.Assert(err, qt.Not(qt.IsNil))

	_, err = DecodeConfig(map[string]interface{}{
		"resampleFilter": "asdf",
	})
	c.Assert(err, qt.Not(qt.IsNil))

	_, err = DecodeConfig(map[string]interface{}{
		"anchor": "asdf",
	})
	c.Assert(err, qt.Not(qt.IsNil))

	imaging, err = DecodeConfig(map[string]interface{}{
		"anchor": "Smart",
	})
	c.Assert(err, qt.IsNil)
	c.Assert(imaging.Anchor, qt.Equals, "smart")
}

func TestDecodeImageConfig(t *testing.T) {
	for i, this := range []struct {
		in     string
		expect interface{}
	}{
		{"300x400", newImageConfig(300, 400, 0, 0, "", "")},
		{"100x200 bottomRight", newImageConfig(100, 200, 0, 0, "", "BottomRight")},
		{"10x20 topleft Lanczos", newImageConfig(10, 20, 0, 0, "Lanczos", "topleft")},
		{"linear left 10x r180", newImageConfig(10, 0, 0, 180, "linear", "left")},
		{"x20 riGht Cosine q95", newImageConfig(0, 20, 95, 0, "cosine", "right")},

		{"", false},
		{"foo", false},
	} {

		result, err := DecodeImageConfig("resize", this.in, Imaging{})
		if b, ok := this.expect.(bool); ok && !b {
			if err == nil {
				t.Errorf("[%d] parseImageConfig didn't return an expected error", i)
			}
		} else {
			if err != nil {
				t.Fatalf("[%d] err: %s", i, err)
			}
			if fmt.Sprint(result) != fmt.Sprint(this.expect) {
				t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, this.expect)
			}
		}
	}
}

func newImageConfig(width, height, quality, rotate int, filter, anchor string) ImageConfig {
	var c ImageConfig
	c.Action = "resize"
	c.Width = width
	c.Height = height
	c.Quality = quality
	c.Rotate = rotate

	if filter != "" {
		filter = strings.ToLower(filter)
		if v, ok := imageFilters[filter]; ok {
			c.Filter = v
			c.FilterStr = filter
		}
	}

	if anchor != "" {
		anchor = strings.ToLower(anchor)
		if v, ok := anchorPositions[anchor]; ok {
			c.Anchor = v
			c.AnchorStr = anchor
		}
	}

	return c
}
170 resources/images/image.go Normal file
@ -0,0 +1,170 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package images

import (
	"image"
	"image/jpeg"
	"io"
	"sync"

	"github.com/disintegration/imaging"
	"github.com/gohugoio/hugo/common/hugio"
	"github.com/pkg/errors"
)

func NewImage(f imaging.Format, proc *ImageProcessor, img image.Image, s Spec) *Image {
	if img != nil {
		return &Image{
			Format: f,
			Proc:   proc,
			Spec:   s,
			imageConfig: &imageConfig{
				config:       imageConfigFromImage(img),
				configLoaded: true,
			},
		}
	}
	return &Image{Format: f, Proc: proc, Spec: s, imageConfig: &imageConfig{}}
}

type Image struct {
	Format imaging.Format

	Proc *ImageProcessor

	Spec Spec

	*imageConfig
}

func (i *Image) EncodeTo(conf ImageConfig, img image.Image, w io.Writer) error {
	switch i.Format {
	case imaging.JPEG:

		var rgba *image.RGBA
		quality := conf.Quality

		if nrgba, ok := img.(*image.NRGBA); ok {
			if nrgba.Opaque() {
				rgba = &image.RGBA{
					Pix:    nrgba.Pix,
					Stride: nrgba.Stride,
					Rect:   nrgba.Rect,
				}
			}
		}
		if rgba != nil {
			return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality})
		}
		return jpeg.Encode(w, img, &jpeg.Options{Quality: quality})
	default:
		return imaging.Encode(w, img, i.Format)
	}
}

// Height returns i's height.
func (i *Image) Height() int {
	i.initConfig()
	return i.config.Height
}

// Width returns i's width.
func (i *Image) Width() int {
	i.initConfig()
	return i.config.Width
}

func (i Image) WithImage(img image.Image) *Image {
	i.Spec = nil
	i.imageConfig = &imageConfig{
		config:       imageConfigFromImage(img),
		configLoaded: true,
	}

	return &i
}

func (i Image) WithSpec(s Spec) *Image {
	i.Spec = s
	i.imageConfig = &imageConfig{}
	return &i
}

func (i *Image) initConfig() error {
	var err error
	i.configInit.Do(func() {
		if i.configLoaded {
			return
		}

		var (
			f      hugio.ReadSeekCloser
			config image.Config
		)

		f, err = i.Spec.ReadSeekCloser()
		if err != nil {
			return
		}
		defer f.Close()

		config, _, err = image.DecodeConfig(f)
		if err != nil {
			return
		}
		i.config = config
	})

	if err != nil {
		return errors.Wrap(err, "failed to load image config")
	}

	return nil
}

type ImageProcessor struct {
	Cfg Imaging
}

func (p *ImageProcessor) Fill(src image.Image, conf ImageConfig) (image.Image, error) {
	if conf.AnchorStr == SmartCropIdentifier {
		return smartCrop(src, conf.Width, conf.Height, conf.Anchor, conf.Filter)
	}
	return imaging.Fill(src, conf.Width, conf.Height, conf.Anchor, conf.Filter), nil
}

func (p *ImageProcessor) Fit(src image.Image, conf ImageConfig) (image.Image, error) {
	return imaging.Fit(src, conf.Width, conf.Height, conf.Filter), nil
}

func (p *ImageProcessor) Resize(src image.Image, conf ImageConfig) (image.Image, error) {
	return imaging.Resize(src, conf.Width, conf.Height, conf.Filter), nil
}

type Spec interface {
	// Loads the image source.
	ReadSeekCloser() (hugio.ReadSeekCloser, error)
}

type imageConfig struct {
	config       image.Config
	configInit   sync.Once
	configLoaded bool
}

func imageConfigFromImage(img image.Image) image.Config {
	b := img.Bounds()
	return image.Config{Width: b.Max.X, Height: b.Max.Y}
}
@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package resources
+package images

 import (
 	"image"
@ -22,13 +22,18 @@ import (

 const (
 	// Do not change.
-	smartCropIdentifier = "smart"
+	// TODO(bep) image unexport
+	SmartCropIdentifier = "smart"

 	// This is just an increment, starting on 1. If Smart Crop improves its cropping, we
 	// need a way to trigger a re-generation of the crops in the wild, so increment this.
 	smartCropVersionNumber = 1
 )

+func newSmartCropAnalyzer(filter imaging.ResampleFilter) smartcrop.Analyzer {
+	return smartcrop.NewAnalyzer(imagingResizer{filter: filter})
+}
+
 // Needed by smartcrop
 type imagingResizer struct {
 	filter imaging.ResampleFilter
@ -38,12 +43,7 @@ func (r imagingResizer) Resize(img image.Image, width, height uint) image.Image
 	return imaging.Resize(img, int(width), int(height), r.filter)
 }

-func newSmartCropAnalyzer(filter imaging.ResampleFilter) smartcrop.Analyzer {
-	return smartcrop.NewAnalyzer(imagingResizer{filter: filter})
-}
-
 func smartCrop(img image.Image, width, height int, anchor imaging.Anchor, filter imaging.ResampleFilter) (*image.NRGBA, error) {

 	if width <= 0 || height <= 0 {
 		return &image.NRGBA{}, nil
 	}
@ -63,7 +63,6 @@ func smartCrop(img image.Image, width, height int, anchor imaging.Anchor, filter
 	smart := newSmartCropAnalyzer(filter)

 	rect, err := smart.FindBestCrop(img, width, height)
-
 	if err != nil {
 		return nil, err
 	}
@ -73,5 +72,4 @@ func smartCrop(img image.Image, width, height int, anchor imaging.Anchor, filter
 	cropped := imaging.Crop(img, b)

 	return imaging.Resize(cropped, width, height, filter), nil
-
 }
61 resources/internal/key.go Normal file
@ -0,0 +1,61 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package internal

import (
	"strconv"

	bp "github.com/gohugoio/hugo/bufferpool"

	"github.com/mitchellh/hashstructure"
)

// ResourceTransformationKey are provided by the different transformation implementations.
// It identifies the transformation (name) and its configuration (elements).
// We combine this in a chain with the rest of the transformations
// with the target filename and a content hash of the origin to use as cache key.
type ResourceTransformationKey struct {
	Name     string
	elements []interface{}
}

// NewResourceTransformationKey creates a new ResourceTransformationKey from the transformation
// name and elements. We will create a 64 bit FNV hash from the elements, which when combined
// with the other key elements should be unique for all practical applications.
func NewResourceTransformationKey(name string, elements ...interface{}) ResourceTransformationKey {
	return ResourceTransformationKey{Name: name, elements: elements}
}

// Value returns the Key as a string.
// Do not change this without good reasons.
func (k ResourceTransformationKey) Value() string {
	if len(k.elements) == 0 {
		return k.Name
	}

	sb := bp.GetBuffer()
	defer bp.PutBuffer(sb)

	sb.WriteString(k.Name)
	for _, element := range k.elements {
		hash, err := hashstructure.Hash(element, nil)
		if err != nil {
			panic(err)
		}
		sb.WriteString("_")
		sb.WriteString(strconv.FormatUint(hash, 10))
	}

	return sb.String()
}
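A small hedged sketch (not part of this commit) of how the key composition described above behaves; it assumes the internal package is importable at the path shown and only relies on the Value() logic defined in this file:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/resources/internal"
)

func main() {
	// With no elements, the key is just the transformation name.
	k1 := internal.NewResourceTransformationKey("minify")

	// With elements, each element is folded into the key as a 64-bit FNV hash,
	// so differently configured transformations get different cache keys.
	k2 := internal.NewResourceTransformationKey("fingerprint", "sha256")

	fmt.Println(k1.Value()) // "minify"
	fmt.Println(k2.Value()) // "fingerprint_<decimal hash of "sha256">"
}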
36 resources/internal/key_test.go Normal file
@ -0,0 +1,36 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package internal

import (
	"testing"

	qt "github.com/frankban/quicktest"
)

type testStruct struct {
	Name string
	V1   int64
	V2   int32
	V3   int
	V4   uint64
}

func TestResourceTransformationKey(t *testing.T) {
	// We really need this key to be portable across OSes.
	key := NewResourceTransformationKey("testing",
		testStruct{Name: "test", V1: int64(10), V2: int32(20), V3: 30, V4: uint64(40)})
	c := qt.New(t)
	c.Assert("testing_518996646957295636", qt.Equals, key.Value())
}
File diff suppressed because it is too large
@ -23,7 +23,7 @@ import (
 // Cloner is an internal template and not meant for use in the templates. It
 // may change without notice.
 type Cloner interface {
-	WithNewBase(base string) Resource
+	Clone() Resource
 }

 // Resource represents a linkable resource, i.e. a content page, image etc.
@ -35,6 +35,20 @@ type Resource interface {
 	ResourceDataProvider
 }

+// Image represents an image resource.
+type Image interface {
+	Resource
+	ImageOps
+}
+
+type ImageOps interface {
+	Height() int
+	Width() int
+	Fill(spec string) (Image, error)
+	Fit(spec string) (Image, error)
+	Resize(spec string) (Image, error)
+}
+
 type ResourceTypesProvider interface {
 	// MediaType is this resource's MIME type.
 	MediaType() media.Type
@ -117,6 +131,10 @@ type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error)
 // ReadSeekCloserResource is a Resource that supports loading its content.
 type ReadSeekCloserResource interface {
 	MediaType() media.Type
+	ReadSeekCloserProvider
+}
+
+type ReadSeekCloserProvider interface {
 	ReadSeekCloser() (hugio.ReadSeekCloser, error)
 }
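To make the new interface concrete, here is a minimal, hypothetical sketch (not part of this commit) of calling code that works purely against resource.Image and ImageOps as declared above; the helper names are made up for illustration:

package imagedemo

import (
	"fmt"

	"github.com/gohugoio/hugo/resources/resource"
)

// thumbnail shows how resource.Image can be chained: Fill, Fit and Resize all
// return another resource.Image, so callers never need the concrete type.
func thumbnail(img resource.Image) (resource.Image, error) {
	resized, err := img.Resize("400x")
	if err != nil {
		return nil, err
	}
	// Fill to a square using the Smart Crop anchor.
	return resized.Fill("200x200 smart")
}

// describe reports the dimensions of the derived image via the ImageOps accessors.
func describe(img resource.Image) (string, error) {
	thumb, err := thumbnail(img)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%dx%d", thumb.Width(), thumb.Height()), nil
}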
@ -281,7 +281,7 @@ func (c *ResourceCache) DeletePartitions(partitions ...string) {

 	for k := range c.cache {
 		clear := false
-		for p, _ := range partitionsSet {
+		for p := range partitionsSet {
 			if strings.Contains(k, p) {
 				// There will be some false positive, but that's fine.
 				clear = true
@ -30,8 +30,14 @@ import (

 var (
 	_ metaAssigner = (*genericResource)(nil)
+	_ metaAssigner = (*imageResource)(nil)
+	_ metaAssignerProvider = (*resourceAdapter)(nil)
 )

+type metaAssignerProvider interface {
+	getMetaAssigner() metaAssigner
+}
+
 // metaAssigner allows updating metadata in resources that supports it.
 type metaAssigner interface {
 	setTitle(title string)
@ -50,9 +56,16 @@ func AssignMetadata(metadata []map[string]interface{}, resources ...resource.Res
 	counters := make(map[string]int)

 	for _, r := range resources {
-		if _, ok := r.(metaAssigner); !ok {
+		var ma metaAssigner
+		mp, ok := r.(metaAssignerProvider)
+		if ok {
+			ma = mp.getMetaAssigner()
+		} else {
+			ma, ok = r.(metaAssigner)
+			if !ok {
 				continue
 			}
+		}

 		var (
 			nameSet, titleSet bool
@ -61,7 +74,6 @@ func AssignMetadata(metadata []map[string]interface{}, resources ...resource.Res
 			resourceSrcKey = strings.ToLower(r.Name())
 		)

-		ma := r.(metaAssigner)
 		for _, meta := range metadata {
 			src, found := meta["src"]
 			if !found {
@ -24,7 +24,7 @@ import (

 func TestAssignMetadata(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})

 	var foo1, foo2, foo3, logo1, logo2, logo3 resource.Resource
 	var resources resource.Resources
304 resources/resource_spec.go Normal file
@ -0,0 +1,304 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package resources

import (
	"errors"
	"fmt"
	"mime"
	"os"
	"path"
	"path/filepath"
	"strings"

	"github.com/gohugoio/hugo/helpers"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/gohugoio/hugo/common/loggers"
	"github.com/gohugoio/hugo/media"
	"github.com/gohugoio/hugo/output"
	"github.com/gohugoio/hugo/resources/images"
	"github.com/gohugoio/hugo/resources/page"
	"github.com/gohugoio/hugo/resources/resource"
	"github.com/gohugoio/hugo/tpl"
	"github.com/spf13/afero"
)

func NewSpec(
	s *helpers.PathSpec,
	fileCaches filecache.Caches,
	logger *loggers.Logger,
	outputFormats output.Formats,
	mimeTypes media.Types) (*Spec, error) {

	imgConfig, err := images.DecodeConfig(s.Cfg.GetStringMap("imaging"))
	if err != nil {
		return nil, err
	}

	imaging := &images.ImageProcessor{Cfg: imgConfig}

	if logger == nil {
		logger = loggers.NewErrorLogger()
	}

	permalinks, err := page.NewPermalinkExpander(s)
	if err != nil {
		return nil, err
	}

	rs := &Spec{PathSpec: s,
		Logger:        logger,
		imaging:       imaging,
		MediaTypes:    mimeTypes,
		OutputFormats: outputFormats,
		Permalinks:    permalinks,
		FileCaches:    fileCaches,
		imageCache: newImageCache(
			fileCaches.ImageCache(),

			s,
		)}

	rs.ResourceCache = newResourceCache(rs)

	return rs, nil

}

type Spec struct {
	*helpers.PathSpec

	MediaTypes    media.Types
	OutputFormats output.Formats

	Logger *loggers.Logger

	TextTemplates tpl.TemplateParseFinder

	Permalinks page.PermalinkExpander

	// Holds default filter settings etc.
	imaging *images.ImageProcessor

	imageCache    *imageCache
	ResourceCache *ResourceCache
	FileCaches    filecache.Caches
}

func (r *Spec) New(fd ResourceSourceDescriptor) (resource.Resource, error) {
	return r.newResourceFor(fd)
}

func (r *Spec) CacheStats() string {
	r.imageCache.mu.RLock()
	defer r.imageCache.mu.RUnlock()

	s := fmt.Sprintf("Cache entries: %d", len(r.imageCache.store))

	count := 0
	for k := range r.imageCache.store {
		if count > 5 {
			break
		}
		s += "\n" + k
		count++
	}

	return s
}

func (r *Spec) ClearCaches() {
	r.imageCache.clear()
	r.ResourceCache.clear()
}

func (r *Spec) DeleteCacheByPrefix(prefix string) {
	r.imageCache.deleteByPrefix(prefix)
}

// TODO(bep) unify
func (r *Spec) IsInImageCache(key string) bool {
	// This is used for cache pruning. We currently only have images, but we could
	// imagine expanding on this.
	return r.imageCache.isInCache(key)
}

func (s *Spec) String() string {
	return "spec"
}

// TODO(bep) clean up below
func (r *Spec) newGenericResource(sourceFs afero.Fs,
	targetPathBuilder func() page.TargetPaths,
	osFileInfo os.FileInfo,
	sourceFilename,
	baseFilename string,
	mediaType media.Type) *genericResource {
	return r.newGenericResourceWithBase(
		sourceFs,
		nil,
		nil,
		targetPathBuilder,
		osFileInfo,
		sourceFilename,
		baseFilename,
		mediaType,
	)

}

func (r *Spec) newGenericResourceWithBase(
	sourceFs afero.Fs,
	openReadSeekerCloser resource.OpenReadSeekCloser,
	targetPathBaseDirs []string,
	targetPathBuilder func() page.TargetPaths,
	osFileInfo os.FileInfo,
	sourceFilename,
	baseFilename string,
	mediaType media.Type) *genericResource {

	if osFileInfo != nil && osFileInfo.IsDir() {
		panic(fmt.Sprintf("dirs not supported resource types: %v", osFileInfo))
	}

	// This value is used both to construct URLs and file paths, but start
	// with a Unix-styled path.
	baseFilename = helpers.ToSlashTrimLeading(baseFilename)
	fpath, fname := path.Split(baseFilename)

	var resourceType string
	if mediaType.MainType == "image" {
		resourceType = mediaType.MainType
	} else {
		resourceType = mediaType.SubType
	}

	pathDescriptor := &resourcePathDescriptor{
		baseTargetPathDirs: helpers.UniqueStringsReuse(targetPathBaseDirs),
		targetPathBuilder:  targetPathBuilder,
		relTargetDirFile:   dirFile{dir: fpath, file: fname},
	}

	gfi := &resourceFileInfo{
		fi:                   osFileInfo,
		openReadSeekerCloser: openReadSeekerCloser,
		sourceFs:             sourceFs,
		sourceFilename:       sourceFilename,
		h:                    &resourceHash{},
	}

	g := &genericResource{
		resourceFileInfo:       gfi,
		resourcePathDescriptor: pathDescriptor,
		mediaType:              mediaType,
		resourceType:           resourceType,
		spec:                   r,
		params:                 make(map[string]interface{}),
		name:                   baseFilename,
		title:                  baseFilename,
		resourceContent:        &resourceContent{},
	}

	return g

}

func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) {
	fi := fd.FileInfo
	var sourceFilename string

	if fd.OpenReadSeekCloser != nil {
	} else if fd.SourceFilename != "" {
		var err error
		fi, err = sourceFs.Stat(fd.SourceFilename)
		if err != nil {
			if os.IsNotExist(err) {
				return nil, nil
			}
			return nil, err
		}
		sourceFilename = fd.SourceFilename
	} else {
		sourceFilename = fd.SourceFile.Filename()
	}

	if fd.RelTargetFilename == "" {
		fd.RelTargetFilename = sourceFilename
	}

	ext := strings.ToLower(filepath.Ext(fd.RelTargetFilename))
	mimeType, found := r.MediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, "."))
	// TODO(bep) we need to handle these ambiguous types better, but in this context
	// we most likely want the application/xml type.
	if mimeType.Suffix() == "xml" && mimeType.SubType == "rss" {
		mimeType, found = r.MediaTypes.GetByType("application/xml")
	}

	if !found {
		// A fallback. Note that mime.TypeByExtension is slow by Hugo standards,
		// so we should configure media types to avoid this lookup for most
		// situations.
		mimeStr := mime.TypeByExtension(ext)
		if mimeStr != "" {
			mimeType, _ = media.FromStringAndExt(mimeStr, ext)
		}
	}

	gr := r.newGenericResourceWithBase(
		sourceFs,
		fd.OpenReadSeekCloser,
		fd.TargetBasePaths,
		fd.TargetPaths,
		fi,
		sourceFilename,
		fd.RelTargetFilename,
		mimeType)

	if mimeType.MainType == "image" {
		imgFormat, ok := images.ImageFormatFromExt(ext)
		if ok {
			ir := &imageResource{
				Image:        images.NewImage(imgFormat, r.imaging, nil, gr),
				baseResource: gr,
			}
			return newResourceAdapter(gr.spec, fd.LazyPublish, ir), nil
		}

	}

	return newResourceAdapter(gr.spec, fd.LazyPublish, gr), nil

}

func (r *Spec) newResourceFor(fd ResourceSourceDescriptor) (resource.Resource, error) {
	if fd.OpenReadSeekCloser == nil {
		if fd.SourceFile != nil && fd.SourceFilename != "" {
			return nil, errors.New("both SourceFile and AbsSourceFilename provided")
		} else if fd.SourceFile == nil && fd.SourceFilename == "" {
			return nil, errors.New("either SourceFile or AbsSourceFilename must be provided")
		}
	}

	if fd.RelTargetFilename == "" {
		fd.RelTargetFilename = fd.Filename()
	}

	if len(fd.TargetBasePaths) == 0 {
		// If not set, we publish the same resource to all hosts.
		fd.TargetBasePaths = r.MultihostTargetBasePaths
	}

	return r.newResource(fd.Fs, fd)
}
@ -32,7 +32,7 @@ import (

 func TestGenericResource(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})

 	r := spec.newGenericResource(nil, nil, nil, "/a/foo.css", "foo.css", media.CSSType)

@ -44,7 +44,7 @@ func TestGenericResource(t *testing.T) {

 func TestGenericResourceWithLinkFacory(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})

 	factory := newTargetPaths("/foo")

@ -58,7 +58,7 @@ func TestGenericResourceWithLinkFacory(t *testing.T) {

 func TestNewResourceFromFilename(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})

 	writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
 	writeSource(t, spec.Fs, "content/a/b/data.json", "json")
@ -79,14 +79,11 @@ func TestNewResourceFromFilename(t *testing.T) {
 	c.Assert(r, qt.Not(qt.IsNil))
 	c.Assert(r.ResourceType(), qt.Equals, "json")

-	cloned := r.(resource.Cloner).WithNewBase("aceof")
-	c.Assert(cloned.ResourceType(), qt.Equals, r.ResourceType())
-	c.Assert(cloned.RelPermalink(), qt.Equals, "/aceof/a/b/data.json")
 }

 func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpecForBaseURL(c, "https://example.com/docs")
+	spec := newTestResourceSpec(specDescriptor{c: c, baseURL: "https://example.com/docs"})

 	writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
 	bfs := afero.NewBasePathFs(spec.Fs.Source, "content")
@ -99,8 +96,6 @@ func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) {
 	c.Assert(r.ResourceType(), qt.Equals, "image")
 	c.Assert(r.RelPermalink(), qt.Equals, "/docs/a/b/logo.png")
 	c.Assert(r.Permalink(), qt.Equals, "https://example.com/docs/a/b/logo.png")
-	img := r.(*Image)
-	c.Assert(img.targetFilenames()[0], qt.Equals, filepath.FromSlash("/a/b/logo.png"))

 }

@ -108,7 +103,7 @@ var pngType, _ = media.FromStringAndExt("image/png", "png")

 func TestResourcesByType(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
 	resources := resource.Resources{
 		spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
 		spec.newGenericResource(nil, nil, nil, "/a/logo.png", "logo.css", pngType),
@ -122,7 +117,7 @@ func TestResourcesByType(t *testing.T) {

 func TestResourcesGetByPrefix(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
 	resources := resource.Resources{
 		spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
 		spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType),
@ -151,7 +146,7 @@ func TestResourcesGetByPrefix(t *testing.T) {

 func TestResourcesGetMatch(t *testing.T) {
 	c := qt.New(t)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
 	resources := resource.Resources{
 		spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
 		spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType),
@ -213,7 +208,7 @@ func BenchmarkResourcesMatch(b *testing.B) {
 // my own curiosity.
 func BenchmarkResourcesMatchA100(b *testing.B) {
 	c := qt.New(b)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
 	a100 := strings.Repeat("a", 100)
 	pattern := "a*a*a*a*a*a*a*a*b"

@ -228,7 +223,7 @@ func BenchmarkResourcesMatchA100(b *testing.B) {

 func benchResources(b *testing.B) resource.Resources {
 	c := qt.New(b)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})
 	var resources resource.Resources

 	for i := 0; i < 30; i++ {
@ -252,7 +247,7 @@ func benchResources(b *testing.B) resource.Resources {

 func BenchmarkAssignMetadata(b *testing.B) {
 	c := qt.New(b)
-	spec := newTestResourceSpec(c)
+	spec := newTestResourceSpec(specDescriptor{c: c})

 	for i := 0; i < b.N; i++ {
 		b.StopTimer()
80 resources/resource_transformers/htesting/testhelpers.go Normal file
@ -0,0 +1,80 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package htesting

import (
	"path/filepath"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/gohugoio/hugo/media"
	"github.com/gohugoio/hugo/output"
	"github.com/gohugoio/hugo/resources"
	"github.com/spf13/afero"
	"github.com/spf13/viper"
)

func NewTestResourceSpec() (*resources.Spec, error) {
	cfg := viper.New()
	cfg.Set("baseURL", "https://example.org")
	cfg.Set("publishDir", "public")

	imagingCfg := map[string]interface{}{
		"resampleFilter": "linear",
		"quality":        68,
		"anchor":         "left",
	}

	cfg.Set("imaging", imagingCfg)

	fs := hugofs.NewMem(cfg)

	s, err := helpers.NewPathSpec(fs, cfg, nil)
	if err != nil {
		return nil, err
	}

	filecaches, err := filecache.NewCaches(s)
	if err != nil {
		return nil, err
	}

	spec, err := resources.NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes)
	return spec, err
}

func NewResourceTransformer(filename, content string) (resources.ResourceTransformer, error) {
	spec, err := NewTestResourceSpec()
	if err != nil {
		return nil, err
	}
	return NewResourceTransformerForSpec(spec, filename, content)
}

func NewResourceTransformerForSpec(spec *resources.Spec, filename, content string) (resources.ResourceTransformer, error) {
	filename = filepath.FromSlash(filename)

	fs := spec.Fs.Source
	if err := afero.WriteFile(fs, filename, []byte(content), 0777); err != nil {
		return nil, err
	}

	r, err := spec.New(resources.ResourceSourceDescriptor{Fs: fs, SourceFilename: filename})
	if err != nil {
		return nil, err
	}

	return r.(resources.ResourceTransformer), nil
}
@ -23,6 +23,8 @@ import (
 	"html/template"
 	"io"

+	"github.com/gohugoio/hugo/resources/internal"
+
 	"github.com/pkg/errors"

 	"github.com/gohugoio/hugo/resources"
@ -46,8 +48,8 @@ type fingerprintTransformation struct {
 	algo string
 }

-func (t *fingerprintTransformation) Key() resources.ResourceTransformationKey {
-	return resources.NewResourceTransformationKey("fingerprint", t.algo)
+func (t *fingerprintTransformation) Key() internal.ResourceTransformationKey {
+	return internal.NewResourceTransformationKey("fingerprint", t.algo)
 }

 // Transform creates a MD5 hash of the Resource content and inserts that hash before
@ -59,7 +61,17 @@ func (t *fingerprintTransformation) Transform(ctx *resources.ResourceTransformat
 		return err
 	}

-	io.Copy(io.MultiWriter(h, ctx.To), ctx.From)
+	var w io.Writer
+	if rc, ok := ctx.From.(io.ReadSeeker); ok {
+		// This transformation does not change the content, so try to
+		// avoid writing to To if we can.
+		defer rc.Seek(0, 0)
+		w = h
+	} else {
+		w = io.MultiWriter(h, ctx.To)
+	}
+
+	io.Copy(w, ctx.From)
 	d, err := digest(h)
 	if err != nil {
 		return err
@ -91,15 +103,12 @@ func newHash(algo string) (hash.Hash, error) {
 // the base64-encoded Subresource Integrity hash, so you will have to stay away from
 // md5 if you plan to use both.
 // See https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity
-func (c *Client) Fingerprint(res resource.Resource, algo string) (resource.Resource, error) {
+func (c *Client) Fingerprint(res resources.ResourceTransformer, algo string) (resource.Resource, error) {
 	if algo == "" {
 		algo = defaultHashAlgo
 	}

-	return c.rs.Transform(
-		res,
-		&fingerprintTransformation{algo: algo},
-	)
+	return res.Transform(&fingerprintTransformation{algo: algo})
 }

 func integrity(algo string, sum []byte) template.HTMLAttr {
@ -14,9 +14,13 @@
 package integrity

 import (
+	"html/template"
 	"testing"

+	"github.com/gohugoio/hugo/resources/resource"
+
 	qt "github.com/frankban/quicktest"
+	"github.com/gohugoio/hugo/resources/resource_transformers/htesting"
 )

 func TestHashFromAlgo(t *testing.T) {
@ -46,3 +50,23 @@ func TestHashFromAlgo(t *testing.T) {
 		})
 	}
 }
+
+func TestTransform(t *testing.T) {
+	c := qt.New(t)
+
+	spec, err := htesting.NewTestResourceSpec()
+	c.Assert(err, qt.IsNil)
+	client := New(spec)
+
+	r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.txt", "Hugo Rocks!")
+	c.Assert(err, qt.IsNil)
+
+	transformed, err := client.Fingerprint(r, "")
+
+	c.Assert(err, qt.IsNil)
+	c.Assert(transformed.RelPermalink(), qt.Equals, "/hugo.a5ad1c6961214a55de53c1ce6e60d27b6b761f54851fa65e33066460dfa6a0db.txt")
+	c.Assert(transformed.Data(), qt.DeepEquals, map[string]interface{}{"Integrity": template.HTMLAttr("sha256-pa0caWEhSlXeU8HObmDSe2t2H1SFH6ZeMwZkYN+moNs=")})
+	content, err := transformed.(resource.ContentProvider).Content()
+	c.Assert(err, qt.IsNil)
+	c.Assert(content, qt.Equals, "Hugo Rocks!")
+}
@@ -16,6 +16,7 @@ package minifier
 import (
 	"github.com/gohugoio/hugo/minifiers"
 	"github.com/gohugoio/hugo/resources"
+	"github.com/gohugoio/hugo/resources/internal"
 	"github.com/gohugoio/hugo/resources/resource"
 )
 
@@ -37,8 +38,8 @@ type minifyTransformation struct {
 	m minifiers.Client
 }
 
-func (t *minifyTransformation) Key() resources.ResourceTransformationKey {
-	return resources.NewResourceTransformationKey("minify")
+func (t *minifyTransformation) Key() internal.ResourceTransformationKey {
+	return internal.NewResourceTransformationKey("minify")
 }
 
 func (t *minifyTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
@@ -49,11 +50,10 @@ func (t *minifyTransformation) Transform(ctx *resources.ResourceTransformationCt
 	return nil
 }
 
-func (c *Client) Minify(res resource.Resource) (resource.Resource, error) {
-	return c.rs.Transform(
-		res,
-		&minifyTransformation{
-			rs: c.rs,
-			m:  c.m},
-	)
+func (c *Client) Minify(res resources.ResourceTransformer) (resource.Resource, error) {
+	return res.Transform(&minifyTransformation{
+		rs: c.rs,
+		m:  c.m,
+	})
 }
resources/resource_transformers/minifier/minify_test.go (new file, 43 lines)
@@ -0,0 +1,43 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package minifier

import (
	"testing"

	"github.com/gohugoio/hugo/resources/resource"

	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/resources/resource_transformers/htesting"
)

func TestTransform(t *testing.T) {
	c := qt.New(t)

	spec, err := htesting.NewTestResourceSpec()
	c.Assert(err, qt.IsNil)
	client := New(spec)

	r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.html", "<h1> Hugo Rocks! </h1>")
	c.Assert(err, qt.IsNil)

	transformed, err := client.Minify(r)
	c.Assert(err, qt.IsNil)

	c.Assert(transformed.RelPermalink(), qt.Equals, "/hugo.min.html")
	content, err := transformed.(resource.ContentProvider).Content()
	c.Assert(err, qt.IsNil)
	c.Assert(content, qt.Equals, "<h1>Hugo Rocks!</h1>")

}
@@ -17,6 +17,7 @@ import (
 	"io"
 	"path/filepath"
 
+	"github.com/gohugoio/hugo/resources/internal"
 	"github.com/spf13/cast"
 
 	"github.com/gohugoio/hugo/hugofs"
@@ -98,8 +99,8 @@ type postcssTransformation struct {
 	rs *resources.Spec
 }
 
-func (t *postcssTransformation) Key() resources.ResourceTransformationKey {
-	return resources.NewResourceTransformationKey("postcss", t.options)
+func (t *postcssTransformation) Key() internal.ResourceTransformationKey {
+	return internal.NewResourceTransformationKey("postcss", t.options)
 }
 
 // Transform shells out to postcss-cli to do the heavy lifting.
@@ -187,9 +188,6 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC
 }
 
 // Process transforms the given Resource with the PostCSS processor.
-func (c *Client) Process(res resource.Resource, options Options) (resource.Resource, error) {
-	return c.rs.Transform(
-		res,
-		&postcssTransformation{rs: c.rs, options: options},
-	)
+func (c *Client) Process(res resources.ResourceTransformer, options Options) (resource.Resource, error) {
+	return res.Transform(&postcssTransformation{rs: c.rs, options: options})
 }
@@ -17,6 +17,7 @@ package templates
 import (
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/resources"
+	"github.com/gohugoio/hugo/resources/internal"
 	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/gohugoio/hugo/tpl"
 	"github.com/pkg/errors"
@@ -47,8 +48,8 @@ type executeAsTemplateTransform struct {
 	data interface{}
 }
 
-func (t *executeAsTemplateTransform) Key() resources.ResourceTransformationKey {
-	return resources.NewResourceTransformationKey("execute-as-template", t.targetPath)
+func (t *executeAsTemplateTransform) Key() internal.ResourceTransformationKey {
+	return internal.NewResourceTransformationKey("execute-as-template", t.targetPath)
 }
 
 func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransformationCtx) error {
@@ -63,14 +64,11 @@ func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransforma
 	return templ.Execute(ctx.To, t.data)
 }
 
-func (c *Client) ExecuteAsTemplate(res resource.Resource, targetPath string, data interface{}) (resource.Resource, error) {
-	return c.rs.Transform(
-		res,
-		&executeAsTemplateTransform{
-			rs:           c.rs,
-			targetPath:   helpers.ToSlashTrimLeading(targetPath),
-			textTemplate: c.textTemplate,
-			data:         data,
-		},
-	)
+func (c *Client) ExecuteAsTemplate(res resources.ResourceTransformer, targetPath string, data interface{}) (resource.Resource, error) {
+	return res.Transform(&executeAsTemplateTransform{
+		rs:           c.rs,
+		targetPath:   helpers.ToSlashTrimLeading(targetPath),
+		textTemplate: c.textTemplate,
+		data:         data,
+	})
 }
@@ -18,6 +18,7 @@ import (
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/hugolib/filesystems"
 	"github.com/gohugoio/hugo/resources"
+	"github.com/gohugoio/hugo/resources/internal"
 	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/spf13/afero"
 
@@ -68,7 +69,7 @@ type options struct {
 	to scss.Options
 }
 
-func (c *Client) ToCSS(res resource.Resource, opts Options) (resource.Resource, error) {
+func (c *Client) ToCSS(res resources.ResourceTransformer, opts Options) (resource.Resource, error) {
 	internalOptions := options{
 		from: opts,
 	}
@@ -83,10 +84,7 @@ func (c *Client) ToCSS(res resource.Resource, opts Options) (resource.Resource,
 		internalOptions.to.Precision = 8
 	}
 
-	return c.rs.Transform(
-		res,
-		&toCSSTransformation{c: c, options: internalOptions},
-	)
+	return res.Transform(&toCSSTransformation{c: c, options: internalOptions})
 }
 
 type toCSSTransformation struct {
@@ -94,8 +92,8 @@ type toCSSTransformation struct {
 	options options
 }
 
-func (t *toCSSTransformation) Key() resources.ResourceTransformationKey {
-	return resources.NewResourceTransformationKey("tocss", t.options.from)
+func (t *toCSSTransformation) Key() internal.ResourceTransformationKey {
+	return internal.NewResourceTransformationKey("tocss", t.options.from)
 }
 
 func DecodeOptions(m map[string]interface{}) (opts Options, err error) {
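All of the transformer clients above now call Transform on the resource itself instead of going through the Spec, which is what makes chains like {{ ($myimg | fingerprint).Width }} possible. A toy sketch of that value-receiver chaining idea, using hypothetical names and under the assumption that each call returns an independent copy of the chain:

package main

import "fmt"

type step func(string) string

// adapter mimics the copy-on-Transform behaviour: appending steps to a copy
// leaves earlier links in the chain usable on their own.
type adapter struct {
	src   string
	steps []step
}

func (a adapter) Transform(s ...step) adapter {
	a.steps = append(append([]step{}, a.steps...), s...)
	return a
}

func (a adapter) Content() string {
	out := a.src
	for _, st := range a.steps {
		out = st(out)
	}
	return out
}

func main() {
	base := adapter{src: "color is blue"}
	upper := base.Transform(func(s string) string { return s + " (minified)" })
	both := upper.Transform(func(s string) string { return s + " (fingerprinted)" })
	fmt.Println(base.Content())
	fmt.Println(upper.Content())
	fmt.Println(both.Content())
}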
@@ -4,8 +4,6 @@ import (
 	"path/filepath"
 	"testing"
 
-	"github.com/gohugoio/hugo/htesting/hqt"
-
 	"image"
 	"io"
 	"io/ioutil"
@@ -28,8 +26,10 @@ import (
 	"github.com/spf13/viper"
 )
 
-func newTestResourceSpec(c *qt.C) *Spec {
-	return newTestResourceSpecForBaseURL(c, "https://example.com/")
+type specDescriptor struct {
+	baseURL string
+	c       *qt.C
+	fs      afero.Fs
 }
 
 func createTestCfg() *viper.Viper {
@@ -54,7 +54,20 @@ func createTestCfg() *viper.Viper {
 
 }
 
-func newTestResourceSpecForBaseURL(c *qt.C, baseURL string) *Spec {
+func newTestResourceSpec(desc specDescriptor) *Spec {
+
+	baseURL := desc.baseURL
+	if baseURL == "" {
+		baseURL = "https://example.com/"
+	}
+
+	afs := desc.fs
+	if afs == nil {
+		afs = afero.NewMemMapFs()
+	}
+
+	c := desc.c
+
 	cfg := createTestCfg()
 	cfg.Set("baseURL", baseURL)
 
@@ -66,7 +79,8 @@ func newTestResourceSpecForBaseURL(c *qt.C, baseURL string) *Spec {
 
 	cfg.Set("imaging", imagingCfg)
 
-	fs := hugofs.NewMem(cfg)
+	fs := hugofs.NewFrom(afs, cfg)
+	fs.Destination = hugofs.NewCreateCountingFs(fs.Destination)
 
 	s, err := helpers.NewPathSpec(fs, cfg, nil)
 	c.Assert(err, qt.IsNil)
@@ -117,19 +131,23 @@ func newTestResourceOsFs(c *qt.C) *Spec {
 
 }
 
-func fetchSunset(c *qt.C) *Image {
+func fetchSunset(c *qt.C) resource.Image {
 	return fetchImage(c, "sunset.jpg")
 }
 
-func fetchImage(c *qt.C, name string) *Image {
-	spec := newTestResourceSpec(c)
+func fetchImage(c *qt.C, name string) resource.Image {
+	spec := newTestResourceSpec(specDescriptor{c: c})
 	return fetchImageForSpec(spec, c, name)
 }
 
-func fetchImageForSpec(spec *Spec, c *qt.C, name string) *Image {
+func fetchImageForSpec(spec *Spec, c *qt.C, name string) resource.Image {
 	r := fetchResourceForSpec(spec, c, name)
-	c.Assert(r, hqt.IsSameType, &Image{})
-	return r.(*Image)
+
+	img := r.(resource.Image)
+
+	c.Assert(img, qt.Not(qt.IsNil))
+	c.Assert(img.(specProvider).getSpec(), qt.Not(qt.IsNil))
+
+	return img
 }
 
 func fetchResourceForSpec(spec *Spec, c *qt.C, name string) resource.ContentResource {
@@ -15,45 +15,63 @@ package resources
 
 import (
 	"bytes"
+	"fmt"
+	"io"
 	"path"
-	"strconv"
 	"strings"
+	"sync"
 
-	"github.com/pkg/errors"
+	"github.com/spf13/afero"
+
+	bp "github.com/gohugoio/hugo/bufferpool"
+
+	"github.com/gohugoio/hugo/resources/internal"
 
-	"github.com/gohugoio/hugo/common/collections"
 	"github.com/gohugoio/hugo/common/herrors"
 	"github.com/gohugoio/hugo/common/hugio"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/resources/resource"
-	"github.com/mitchellh/hashstructure"
-
-	"fmt"
-	"io"
-	"sync"
 
 	"github.com/gohugoio/hugo/media"
-
-	bp "github.com/gohugoio/hugo/bufferpool"
 )
 
 var (
-	_ resource.ContentResource        = (*transformedResource)(nil)
-	_ resource.ReadSeekCloserResource = (*transformedResource)(nil)
-	_ collections.Slicer              = (*transformedResource)(nil)
-	_ resource.Identifier             = (*transformedResource)(nil)
+	_ resource.ContentResource        = (*resourceAdapter)(nil)
+	_ resource.ReadSeekCloserResource = (*resourceAdapter)(nil)
+	_ resource.Resource               = (*resourceAdapter)(nil)
+	_ resource.Source                 = (*resourceAdapter)(nil)
+	_ resource.Identifier             = (*resourceAdapter)(nil)
+	_ resource.ResourceMetaProvider   = (*resourceAdapter)(nil)
 )
 
-func (s *Spec) Transform(r resource.Resource, t ResourceTransformation) (resource.Resource, error) {
-	if r == nil {
-		return nil, errors.New("got nil Resource in transformation. Make sure you check with 'with' or 'if' when you get a resource, e.g. with resources.Get.")
-	}
-
-	return &transformedResource{
-		Resource:                    r,
-		transformation:              t,
-		transformedResourceMetadata: transformedResourceMetadata{MetaData: make(map[string]interface{})},
-		cache:                       s.ResourceCache}, nil
+// These are transformations that need special support in Hugo that may not
+// be available when building the theme/site so we write the transformation
+// result to disk and reuse if needed for these,
+var transformationsToCacheOnDisk = map[string]bool{
+	"postcss": true,
+	"tocss":   true,
+}
+
+func newResourceAdapter(spec *Spec, lazyPublish bool, target transformableResource) *resourceAdapter {
+	var po *publishOnce
+	if lazyPublish {
+		po = &publishOnce{}
+	}
+	return &resourceAdapter{
+		resourceTransformations: &resourceTransformations{},
+		resourceAdapterInner: &resourceAdapterInner{
+			spec:        spec,
+			publishOnce: po,
+			target:      target,
+		},
+	}
+}
+
+// ResourceTransformation is the interface that a resource transformation step
+// needs to implement.
+type ResourceTransformation interface {
+	Key() internal.ResourceTransformationKey
+	Transform(ctx *ResourceTransformationCtx) error
 }
 
 type ResourceTransformationCtx struct {
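newResourceAdapter above wires in a publishOnce only when publishing should be lazy, and the adapter later drops it when a transformation writes the output itself. A compact sketch of the publish-at-most-once behaviour built on sync.Once; the types here are hypothetical, not Hugo's:

package main

import (
	"fmt"
	"sync"
)

// publishOnce defers the write to /public until a permalink is actually
// requested, and guarantees it happens at most once even under concurrency.
type publishOnce struct {
	once sync.Once
	err  error
}

type fakeResource struct {
	po      *publishOnce
	publish func() error
}

func (r *fakeResource) RelPermalink() string {
	if r.po != nil {
		r.po.once.Do(func() { r.po.err = r.publish() })
	}
	return "/f1.txt"
}

func main() {
	n := 0
	r := &fakeResource{po: &publishOnce{}, publish: func() error { n++; return nil }}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() { defer wg.Done(); r.RelPermalink() }()
	}
	wg.Wait()
	fmt.Println("published", n, "time(s)") // published 1 time(s)
}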
@@ -95,20 +113,6 @@ func (ctx *ResourceTransformationCtx) AddOutPathIdentifier(identifier string) {
 	ctx.OutPath = ctx.addPathIdentifier(ctx.InPath, identifier)
 }
 
-func (ctx *ResourceTransformationCtx) addPathIdentifier(inPath, identifier string) string {
-	dir, file := path.Split(inPath)
-	base, ext := helpers.PathAndExt(file)
-	return path.Join(dir, (base + identifier + ext))
-}
-
-// ReplaceOutPathExtension transforming InPath to OutPath replacing the file
-// extension, e.g. ".scss"
-func (ctx *ResourceTransformationCtx) ReplaceOutPathExtension(newExt string) {
-	dir, file := path.Split(ctx.InPath)
-	base, _ := helpers.PathAndExt(file)
-	ctx.OutPath = path.Join(dir, (base + newExt))
-}
-
 // PublishSourceMap writes the content to the target folder of the main resource
 // with the ".map" extension added.
 func (ctx *ResourceTransformationCtx) PublishSourceMap(content string) error {
@@ -122,240 +126,198 @@ func (ctx *ResourceTransformationCtx) PublishSourceMap(content string) error {
 	return err
 }
 
-// ResourceTransformationKey are provided by the different transformation implementations.
-// It identifies the transformation (name) and its configuration (elements).
-// We combine this in a chain with the rest of the transformations
-// with the target filename and a content hash of the origin to use as cache key.
-type ResourceTransformationKey struct {
-	name     string
-	elements []interface{}
-}
-
-// NewResourceTransformationKey creates a new ResourceTransformationKey from the transformation
-// name and elements. We will create a 64 bit FNV hash from the elements, which when combined
-// with the other key elements should be unique for all practical applications.
-func NewResourceTransformationKey(name string, elements ...interface{}) ResourceTransformationKey {
-	return ResourceTransformationKey{name: name, elements: elements}
-}
-
-// Do not change this without good reasons.
-func (k ResourceTransformationKey) key() string {
-	if len(k.elements) == 0 {
-		return k.name
-	}
-
-	sb := bp.GetBuffer()
-	defer bp.PutBuffer(sb)
-
-	sb.WriteString(k.name)
-	for _, element := range k.elements {
-		hash, err := hashstructure.Hash(element, nil)
-		if err != nil {
-			panic(err)
-		}
-		sb.WriteString("_")
-		sb.WriteString(strconv.FormatUint(hash, 10))
-	}
-
-	return sb.String()
-}
-
-// ResourceTransformation is the interface that a resource transformation step
-// needs to implement.
-type ResourceTransformation interface {
-	Key() ResourceTransformationKey
-	Transform(ctx *ResourceTransformationCtx) error
-}
-
-// We will persist this information to disk.
-type transformedResourceMetadata struct {
-	Target     string                 `json:"Target"`
-	MediaTypeV string                 `json:"MediaType"`
-	MetaData   map[string]interface{} `json:"Data"`
-}
-
-type transformedResource struct {
-	commonResource
-
-	cache *ResourceCache
-
-	// This is the filename inside resources/_gen/assets
-	sourceFilename string
-
-	linker permalinker
-
-	// The transformation to apply.
-	transformation ResourceTransformation
-
-	// We apply the tranformations lazily.
-	transformInit sync.Once
-	transformErr  error
-
-	// We delay publishing until either .RelPermalink or .Permalink
-	// is invoked.
-	publishInit sync.Once
-	published   bool
-
-	// The transformed values
-	content     string
-	contentInit sync.Once
-	transformedResourceMetadata
-
-	// The source
-	resource.Resource
-}
-
-func (r *transformedResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
-	if err := r.initContent(); err != nil {
-		return nil, err
-	}
-	return hugio.NewReadSeekerNoOpCloserFromString(r.content), nil
-}
-
-func (r *transformedResource) transferTransformedValues(another *transformedResource) {
-	if another.content != "" {
-		r.contentInit.Do(func() {
-			r.content = another.content
-		})
-	}
-	r.transformedResourceMetadata = another.transformedResourceMetadata
-}
-
-func (r *transformedResource) tryTransformedFileCache(key string) io.ReadCloser {
-	fi, f, meta, found := r.cache.getFromFile(key)
-	if !found {
-		return nil
-	}
-	r.transformedResourceMetadata = meta
-	r.sourceFilename = fi.Name
-
-	return f
-}
-
-func (r *transformedResource) Content() (interface{}, error) {
-	if err := r.initTransform(true, false); err != nil {
-		return nil, err
-	}
-	if err := r.initContent(); err != nil {
-		return "", err
-	}
-	return r.content, nil
-}
-
-func (r *transformedResource) Data() interface{} {
-	if err := r.initTransform(false, false); err != nil {
-		return noData
-	}
-	return r.MetaData
-}
-
-func (r *transformedResource) MediaType() media.Type {
-	if err := r.initTransform(false, false); err != nil {
-		return media.Type{}
-	}
-	m, _ := r.cache.rs.MediaTypes.GetByType(r.MediaTypeV)
-	return m
-}
-
-func (r *transformedResource) Key() string {
-	if err := r.initTransform(false, false); err != nil {
-		return ""
-	}
-	return r.linker.relPermalinkFor(r.Target)
-}
-
-func (r *transformedResource) Permalink() string {
-	if err := r.initTransform(false, true); err != nil {
-		return ""
-	}
-	return r.linker.permalinkFor(r.Target)
-}
-
-func (r *transformedResource) RelPermalink() string {
-	if err := r.initTransform(false, true); err != nil {
-		return ""
-	}
-	return r.linker.relPermalinkFor(r.Target)
-}
-
-func (r *transformedResource) initContent() error {
-	var err error
-	r.contentInit.Do(func() {
-		var b []byte
-		_, b, err = r.cache.fileCache.GetBytes(r.sourceFilename)
-		if err != nil {
-			return
-		}
-		r.content = string(b)
-	})
-	return err
-}
-
-func (r *transformedResource) openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) {
-	return helpers.OpenFilesForWriting(r.cache.rs.PublishFs, r.linker.relTargetPathsFor(relTargetPath)...)
-}
-
-func (r *transformedResource) transform(setContent, publish bool) (err error) {
-
-	// This can be the last resource in a chain.
-	// Rewind and create a processing chain.
-	var chain []resource.Resource
-	current := r
-	for {
-		rr := current.Resource
-		chain = append(chain[:0], append([]resource.Resource{rr}, chain[0:]...)...)
-		if tr, ok := rr.(*transformedResource); ok {
-			current = tr
-		} else {
-			break
-		}
-	}
-
-	// Append the current transformer at the end
-	chain = append(chain, r)
-
-	first := chain[0]
-
-	// Files with a suffix will be stored in cache (both on disk and in memory)
-	// partitioned by their suffix. There will be other files below /other.
-	// This partition is also how we determine what to delete on server reloads.
-	var key, base string
-	for _, element := range chain {
-		switch v := element.(type) {
-		case *transformedResource:
-			key = key + "_" + v.transformation.Key().key()
-		case permalinker:
-			r.linker = v
-			p := v.TargetPath()
-			if p == "" {
-				panic("target path needed for key creation")
-			}
-			base = ResourceCacheKey(p)
-		default:
-			return fmt.Errorf("transformation not supported for type %T", element)
-		}
-	}
-
-	key = r.cache.cleanKey(base) + "_" + helpers.MD5String(key)
-
-	cached, found := r.cache.get(key)
-	if found {
-		r.transferTransformedValues(cached.(*transformedResource))
-		return
-	}
-
-	// Acquire a write lock for the named transformation.
-	r.cache.nlocker.Lock(key)
-	// Check the cache again.
-	cached, found = r.cache.get(key)
-	if found {
-		r.transferTransformedValues(cached.(*transformedResource))
-		r.cache.nlocker.Unlock(key)
-		return
-	}
-
-	defer r.cache.nlocker.Unlock(key)
-	defer r.cache.set(key, r)
-
+// ReplaceOutPathExtension transforming InPath to OutPath replacing the file
+// extension, e.g. ".scss"
+func (ctx *ResourceTransformationCtx) ReplaceOutPathExtension(newExt string) {
+	dir, file := path.Split(ctx.InPath)
+	base, _ := helpers.PathAndExt(file)
+	ctx.OutPath = path.Join(dir, (base + newExt))
+}
+
+func (ctx *ResourceTransformationCtx) addPathIdentifier(inPath, identifier string) string {
+	dir, file := path.Split(inPath)
+	base, ext := helpers.PathAndExt(file)
+	return path.Join(dir, (base + identifier + ext))
+}
+
+type publishOnce struct {
+	publisherInit sync.Once
+	publisherErr  error
+}
+
+type resourceAdapter struct {
+	commonResource
+	*resourceTransformations
+	*resourceAdapterInner
+}
+
+func (r *resourceAdapter) Content() (interface{}, error) {
+	r.init(false, true)
+	if r.transformationsErr != nil {
+		return nil, r.transformationsErr
+	}
+	return r.target.Content()
+}
+
+func (r *resourceAdapter) Data() interface{} {
+	r.init(false, false)
+	return r.target.Data()
+}
+
+func (r *resourceAdapter) Fill(spec string) (resource.Image, error) {
+	return r.getImageOps().Fill(spec)
+}
+
+func (r *resourceAdapter) Fit(spec string) (resource.Image, error) {
+	return r.getImageOps().Fit(spec)
+}
+
+func (r *resourceAdapter) Height() int {
+	return r.getImageOps().Height()
+}
+
+func (r *resourceAdapter) Key() string {
+	r.init(false, false)
+	return r.target.(resource.Identifier).Key()
+}
+
+func (r *resourceAdapter) MediaType() media.Type {
+	r.init(false, false)
+	return r.target.MediaType()
+}
+
+func (r *resourceAdapter) Name() string {
+	r.init(false, false)
+	return r.target.Name()
+}
+
+func (r *resourceAdapter) Params() map[string]interface{} {
+	r.init(false, false)
+	return r.target.Params()
+}
+
+func (r *resourceAdapter) Permalink() string {
+	r.init(true, false)
+	return r.target.Permalink()
+}
+
+func (r *resourceAdapter) Publish() error {
+	r.init(false, false)
+
+	return r.target.Publish()
+}
+
+func (r *resourceAdapter) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
+	r.init(false, false)
+	return r.target.ReadSeekCloser()
+}
+
+func (r *resourceAdapter) RelPermalink() string {
+	r.init(true, false)
+	return r.target.RelPermalink()
+}
+
+func (r *resourceAdapter) Resize(spec string) (resource.Image, error) {
+	return r.getImageOps().Resize(spec)
+}
+
+func (r *resourceAdapter) ResourceType() string {
+	r.init(false, false)
+	return r.target.ResourceType()
+}
+
+func (r *resourceAdapter) String() string {
+	return r.Name()
+}
+
+func (r *resourceAdapter) Title() string {
+	r.init(false, false)
+	return r.target.Title()
+}
+
+func (r resourceAdapter) Transform(t ...ResourceTransformation) (ResourceTransformer, error) {
+	r.resourceTransformations = &resourceTransformations{
+		transformations: append(r.transformations, t...),
+	}
+
+	r.resourceAdapterInner = &resourceAdapterInner{
+		spec:        r.spec,
+		publishOnce: &publishOnce{},
+		target:      r.target,
+	}
+
+	return &r, nil
+}
+
+func (r *resourceAdapter) Width() int {
+	return r.getImageOps().Width()
+}
+
+func (r *resourceAdapter) getImageOps() resource.ImageOps {
+	img, ok := r.target.(resource.ImageOps)
+	if !ok {
+		panic(fmt.Sprintf("%T is not an image", r.target))
+	}
+	r.init(false, false)
+	return img
+}
+
+func (r *resourceAdapter) getMetaAssigner() metaAssigner {
+	return r.target
+}
+
+func (r *resourceAdapter) getSpec() *Spec {
+	return r.spec
+}
+
+func (r *resourceAdapter) publish() {
+	if r.publishOnce == nil {
+		return
+	}
+
+	r.publisherInit.Do(func() {
+		r.publisherErr = r.target.Publish()
+
+		if r.publisherErr != nil {
+			r.spec.Logger.ERROR.Printf("Failed to publish Resource: %s", r.publisherErr)
+		}
+	})
+}
+
+func (r *resourceAdapter) transform(publish, setContent bool) error {
+	cache := r.spec.ResourceCache
+
+	// Files with a suffix will be stored in cache (both on disk and in memory)
+	// partitioned by their suffix.
+	var key string
+	for _, tr := range r.transformations {
+		key = key + "_" + tr.Key().Value()
+	}
+
+	base := ResourceCacheKey(r.target.TargetPath())
+
+	key = cache.cleanKey(base) + "_" + helpers.MD5String(key)
+
+	cached, found := cache.get(key)
+
+	if found {
+		r.resourceAdapterInner = cached.(*resourceAdapterInner)
+		return nil
+	}
+
+	// Acquire a write lock for the named transformation.
+	cache.nlocker.Lock(key)
+	// Check the cache again.
+	cached, found = cache.get(key)
+	if found {
+		r.resourceAdapterInner = cached.(*resourceAdapterInner)
+		cache.nlocker.Unlock(key)
+		return nil
+	}
+
+	defer cache.nlocker.Unlock(key)
+	defer cache.set(key, r.resourceAdapterInner)
+
 	b1 := bp.GetBuffer()
 	b2 := bp.GetBuffer()
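The cache key above is derived from every transformation key in the chain plus the target path, so identical chains over the same source resolve to the same cache entry. A rough, illustrative sketch of that derivation (not Hugo's actual helpers):

package main

import (
	"crypto/md5"
	"fmt"
)

// chainKey concatenates the per-transformation keys and hashes them together
// with the target path of the source resource.
func chainKey(targetPath string, transformationKeys ...string) string {
	var key string
	for _, k := range transformationKeys {
		key += "_" + k
	}
	return fmt.Sprintf("%s_%x", targetPath, md5.Sum([]byte(key)))
}

func main() {
	// The same chain over the same target always yields the same key.
	fmt.Println(chainKey("css/main.css", "tocss", "postcss", "minify"))
}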
@@ -363,68 +325,77 @@ func (r *transformedResource) transform(setContent, publish bool) (err error) {
 	defer bp.PutBuffer(b2)
 
 	tctx := &ResourceTransformationCtx{
-		Data:                  r.transformedResourceMetadata.MetaData,
-		OpenResourcePublisher: r.openPublishFileForWriting,
+		Data:                  make(map[string]interface{}),
+		OpenResourcePublisher: r.target.openPublishFileForWriting,
 	}
 
-	tctx.InMediaType = first.MediaType()
-	tctx.OutMediaType = first.MediaType()
+	tctx.InMediaType = r.target.MediaType()
+	tctx.OutMediaType = r.target.MediaType()
 
-	contentrc, err := contentReadSeekerCloser(first)
+	startCtx := *tctx
+	updates := &transformationUpdate{startCtx: startCtx}
+
+	var contentrc hugio.ReadSeekCloser
+
+	contentrc, err := contentReadSeekerCloser(r.target)
 	if err != nil {
 		return err
 	}
 
 	defer contentrc.Close()
 
 	tctx.From = contentrc
 	tctx.To = b1
 
-	if r.linker != nil {
-		tctx.InPath = r.linker.TargetPath()
-		tctx.SourcePath = tctx.InPath
-	}
+	tctx.InPath = r.target.TargetPath()
+	tctx.SourcePath = tctx.InPath
 
 	counter := 0
+	writeToFileCache := false
 
 	var transformedContentr io.Reader
 
-	for _, element := range chain {
-		tr, ok := element.(*transformedResource)
-		if !ok {
-			continue
-		}
-		counter++
-		if counter != 1 {
+	for i, tr := range r.transformations {
+		if i != 0 {
 			tctx.InMediaType = tctx.OutMediaType
 		}
-		if counter%2 == 0 {
-			tctx.From = b1
-			b2.Reset()
-			tctx.To = b2
-		} else {
-			if counter != 1 {
-				// The first reader is the file.
-				tctx.From = b2
+
+		if !writeToFileCache {
+			writeToFileCache = transformationsToCacheOnDisk[tr.Key().Name]
+		}
+
+		if i > 0 {
+			hasWrites := tctx.To.(*bytes.Buffer).Len() > 0
+			if hasWrites {
+				counter++
+				// Switch the buffers
+				if counter%2 == 0 {
+					tctx.From = b2
+					b1.Reset()
+					tctx.To = b1
+				} else {
+					tctx.From = b1
+					b2.Reset()
+					tctx.To = b2
+				}
 			}
-			b1.Reset()
-			tctx.To = b1
 		}
 
-		if err := tr.transformation.Transform(tctx); err != nil {
-			if err == herrors.ErrFeatureNotAvailable {
+		if err = tr.Transform(tctx); err != nil {
+			if writeToFileCache && err == herrors.ErrFeatureNotAvailable {
 				// This transformation is not available in this
 				// Hugo installation (scss not compiled in, PostCSS not available etc.)
 				// If a prepared bundle for this transformation chain is available, use that.
-				f := r.tryTransformedFileCache(key)
+				f := r.target.tryTransformedFileCache(key, updates)
 				if f == nil {
 					errMsg := err.Error()
-					if tr.transformation.Key().name == "postcss" {
+					if tr.Key().Name == "postcss" {
 						errMsg = "PostCSS not found; install with \"npm install postcss-cli\". See https://gohugo.io/hugo-pipes/postcss/"
 					}
-					return fmt.Errorf("%s: failed to transform %q (%s): %s", strings.ToUpper(tr.transformation.Key().name), tctx.InPath, tctx.InMediaType.Type(), errMsg)
+					return fmt.Errorf("%s: failed to transform %q (%s): %s", strings.ToUpper(tr.Key().Name), tctx.InPath, tctx.InMediaType.Type(), errMsg)
 				}
 				transformedContentr = f
+				updates.sourceFs = cache.fileCache.Fs
 				defer f.Close()
 
 				// The reader above is all we need.
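The loop above runs the chain through two pooled buffers: the output of one step becomes the input of the next, and the two buffers swap roles whenever a step actually wrote something. A standalone sketch of that ping-pong pattern (standard library only, illustrative names):

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

// runChain feeds src through the steps, alternating two buffers so each
// step reads the previous step's output and writes into the other buffer.
func runChain(src io.Reader, steps ...func(io.Reader, io.Writer)) string {
	b1, b2 := new(bytes.Buffer), new(bytes.Buffer)
	from, to := src, io.Writer(b1)
	for i, step := range steps {
		if i > 0 {
			if i%2 == 1 {
				from, to = b1, b2
			} else {
				from, to = b2, b1
			}
			to.(*bytes.Buffer).Reset()
		}
		step(from, to)
	}
	return to.(*bytes.Buffer).String()
}

func main() {
	upper := func(r io.Reader, w io.Writer) {
		b, _ := io.ReadAll(r)
		io.WriteString(w, strings.ToUpper(string(b)))
	}
	exclaim := func(r io.Reader, w io.Writer) {
		b, _ := io.ReadAll(r)
		w.Write(append(b, '!'))
	}
	fmt.Println(runChain(strings.NewReader("hugo rocks"), upper, exclaim)) // HUGO ROCKS!
}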
@@ -442,34 +413,35 @@ func (r *transformedResource) transform(setContent, publish bool) (err error) {
 	}
 
 	if transformedContentr == nil {
-		r.Target = tctx.InPath
-		r.MediaTypeV = tctx.OutMediaType.Type()
+		updates.updateFromCtx(tctx)
 	}
 
 	var publishwriters []io.WriteCloser
 
 	if publish {
-		publicw, err := r.openPublishFileForWriting(r.Target)
+		publicw, err := r.target.openPublishFileForWriting(updates.targetPath)
 		if err != nil {
-			r.transformErr = err
 			return err
 		}
-		defer publicw.Close()
 
 		publishwriters = append(publishwriters, publicw)
 	}
 
 	if transformedContentr == nil {
-		// Also write it to the cache
-		fi, metaw, err := r.cache.writeMeta(key, r.transformedResourceMetadata)
-		if err != nil {
-			return err
-		}
-		r.sourceFilename = fi.Name
-		publishwriters = append(publishwriters, metaw)
+		if writeToFileCache {
+			// Also write it to the cache
+			fi, metaw, err := cache.writeMeta(key, updates.toTransformedResourceMetadata())
+			if err != nil {
+				return err
+			}
+			updates.sourceFilename = &fi.Name
+			updates.sourceFs = cache.fileCache.Fs
+			publishwriters = append(publishwriters, metaw)
+		}
 
-		if counter > 0 {
+		// Any transofrmations reading from From must also write to To.
+		// This means that if the target buffer is empty, we can just reuse
+		// the original reader.
+		if b, ok := tctx.To.(*bytes.Buffer); ok && b.Len() > 0 {
 			transformedContentr = tctx.To.(*bytes.Buffer)
 		} else {
 			transformedContentr = contentrc
@@ -479,6 +451,8 @@ func (r *transformedResource) transform(setContent, publish bool) (err error) {
 
 	// Also write it to memory
 	var contentmemw *bytes.Buffer
 
+	setContent = setContent || !writeToFileCache
+
 	if setContent {
 		contentmemw = bp.GetBuffer()
 		defer bp.PutBuffer(contentmemw)
@@ -486,65 +460,111 @@ func (r *transformedResource) transform(setContent, publish bool) (err error) {
 	}
 
 	publishw := hugio.NewMultiWriteCloser(publishwriters...)
-	_, r.transformErr = io.Copy(publishw, transformedContentr)
+	_, err = io.Copy(publishw, transformedContentr)
+	if err != nil {
+		return err
+	}
 	publishw.Close()
 
 	if setContent {
-		r.contentInit.Do(func() {
-			r.content = contentmemw.String()
-		})
+		s := contentmemw.String()
+		updates.content = &s
 	}
 
+	newTarget, err := r.target.cloneWithUpdates(updates)
+	if err != nil {
+		return err
+	}
+	r.target = newTarget
+
 	return nil
 }
 
-func (r *transformedResource) initTransform(setContent, publish bool) error {
-	r.transformInit.Do(func() {
-		r.published = publish
-		if err := r.transform(setContent, publish); err != nil {
-			r.transformErr = err
-			r.cache.rs.Logger.ERROR.Println("error: failed to transform resource:", err)
-		}
-
-	})
-
-	if !publish {
-		return r.transformErr
-	}
-
-	r.publishInit.Do(func() {
-		if r.published {
-			return
-		}
-
-		r.published = true
-
-		// Copy the file from cache to /public
-		_, src, err := r.cache.fileCache.Get(r.sourceFilename)
-		if src == nil {
-			panic(fmt.Sprintf("[BUG] resource cache file not found: %q", r.sourceFilename))
-		}
-
-		if err == nil {
-			defer src.Close()
-
-			var dst io.WriteCloser
-			dst, err = r.openPublishFileForWriting(r.Target)
-
-			if err == nil {
-				defer dst.Close()
-				io.Copy(dst, src)
-			}
-		}
-
-		if err != nil {
-			r.transformErr = err
-			r.cache.rs.Logger.ERROR.Println("error: failed to publish resource:", err)
-			return
-		}
-
-	})
-
-	return r.transformErr
+func (r *resourceAdapter) init(publish, setContent bool) {
+	r.initTransform(publish, setContent)
+}
+
+func (r *resourceAdapter) initTransform(publish, setContent bool) {
+	r.transformationsInit.Do(func() {
+		if len(r.transformations) == 0 {
+			// Nothing to do.
+			return
+		}
+
+		if publish {
+			// The transformation will write the content directly to
+			// the destination.
+			r.publishOnce = nil
+		}
+
+		r.transformationsErr = r.transform(publish, setContent)
+		if r.transformationsErr != nil {
+			r.spec.Logger.ERROR.Printf("Transformation failed: %s", r.transformationsErr)
+		}
+	})
+
+	if publish && r.publishOnce != nil {
+		r.publish()
+	}
+}
+
+type resourceAdapterInner struct {
+	target transformableResource
+
+	spec *Spec
+
+	// Handles publishing (to /public) if needed.
+	*publishOnce
+}
+
+type resourceTransformations struct {
+	transformationsInit sync.Once
+	transformationsErr  error
+	transformations     []ResourceTransformation
+}
+
+type transformableResource interface {
+	baseResourceInternal
+
+	resource.ContentProvider
+	resource.Resource
+}
+
+type transformationUpdate struct {
+	content        *string
+	sourceFilename *string
+	sourceFs       afero.Fs
+	targetPath     string
+	mediaType      media.Type
+	data           map[string]interface{}
+
+	startCtx ResourceTransformationCtx
+}
+
+func (u *transformationUpdate) isContenChanged() bool {
+	return u.content != nil || u.sourceFilename != nil
+}
+
+func (u *transformationUpdate) toTransformedResourceMetadata() transformedResourceMetadata {
+	return transformedResourceMetadata{
+		MediaTypeV: u.mediaType.Type(),
+		Target:     u.targetPath,
+		MetaData:   u.data,
+	}
+}
+
+func (u *transformationUpdate) updateFromCtx(ctx *ResourceTransformationCtx) {
+	u.targetPath = ctx.OutPath
+	u.mediaType = ctx.OutMediaType
+	u.data = ctx.Data
+	u.targetPath = ctx.InPath
+}
+
+// We will persist this information to disk.
+type transformedResourceMetadata struct {
+	Target     string                 `json:"Target"`
+	MediaTypeV string                 `json:"MediaType"`
+	MetaData   map[string]interface{} `json:"Data"`
 }
 
 // contentReadSeekerCloser returns a ReadSeekerCloser if possible for a given Resource.
@@ -14,23 +14,427 @@
 package resources
 
 import (
+	"encoding/base64"
+	"fmt"
+	"io"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"sync"
 	"testing"
 
+	"github.com/gohugoio/hugo/htesting"
+
+	"github.com/gohugoio/hugo/common/herrors"
+	"github.com/gohugoio/hugo/hugofs"
+
+	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/resources/internal"
+
+	"github.com/gohugoio/hugo/helpers"
+
+	"github.com/gohugoio/hugo/resources/resource"
+	"github.com/spf13/afero"
+
 	qt "github.com/frankban/quicktest"
 )
 
-type testStruct struct {
const gopher = `iVBORw0KGgoAAAANSUhEUgAAAEsAAAA8CAAAAAALAhhPAAAFfUlEQVRYw62XeWwUVRzHf2+OPbo9d7tsWyiyaZti6eWGAhISoIGKECEKCAiJJkYTiUgTMYSIosYYBBIUIxoSPIINEBDi2VhwkQrVsj1ESgu9doHWdrul7ba73WNm3vOPtsseM9MdwvvrzTs+8/t95ze/33sI5BqiabU6m9En8oNjduLnAEDLUsQXFF8tQ5oxK3vmnNmDSMtrncks9Hhtt/qeWZapHb1ha3UqYSWVl2ZmpWgaXMXGohQAvmeop3bjTRtv6SgaK/Pb9/bFzUrYslbFAmHPp+3WhAYdr+7GN/YnpN46Opv55VDsJkoEpMrY/vO2BIYQ6LLvm0ThY3MzDzzeSJeeWNyTkgnIE5ePKsvKlcg/0T9QMzXalwXMlj54z4c0rh/mzEfr+FgWEz2w6uk8dkzFAgcARAgNp1ZYef8bH2AgvuStbc2/i6CiWGj98y2tw2l4FAXKkQBIf+exyRnteY83LfEwDQAYCoK+P6bxkZm/0966LxcAAILHB56kgD95PPxltuYcMtFTWw/FKkY/6Opf3GGd9ZF+Qp6mzJxzuRSractOmJrH1u8XTvWFHINNkLQLMR+XHXvfPPHw967raE1xxwtA36IMRfkAAG29/7mLuQcb2WOnsJReZGfpiHsSBX81cvMKywYZHhX5hFPtOqPGWZCXnhWGAu6lX91ElKXSalcLXu3UaOXVay57ZSe5f6Gpx7J2MXAsi7EqSp09b/MirKSyJfnfEEgeDjl8FgDAfvewP03zZ+AJ0m9aFRM8eEHBDRKjfcreDXnZdQuAxXpT2NRJ7xl3UkLBhuVGU16gZiGOgZmrSbRdqkILuL/yYoSXHHkl9KXgqNu3PB8oRg0geC5vFmLjad6mUyTKLmF3OtraWDIfACyXqmephaDABawfpi6tqqBZytfQMqOz6S09iWXhktrRaB8Xz4Yi/8gyABDm5NVe6qq/3VzPrcjELWrebVuyY2T7ar4zQyybUCtsQ5Es1FGaZVrRVQwAgHGW2ZCRZshI5bGQi7HesyE972pOSeMM0dSktlzxRdrlqb3Osa6CCS8IJoQQQgBAbTAa5l5epO34rJszibJI8rxLfGzcp1dRosutGeb2VDNgqYrwTiPNsLxXiPi3dz7LiS1WBRBDBOnqEjyy3aQb+/bLiJzz9dIkscVBBLxMfSEac7kO4Fpkngi0ruNBeSOal+u8jgOuqPz12nryMLCniEjtOOOmpt+KEIqsEdocJjYXwrh9OZqWJQyPCTo67LNS/TdxLAv6R5ZNK9npEjbYdT33gRo4o5oTqR34R+OmaSzDBWsAIPhuRcgyoteNi9gF0KzNYWVItPf2TLoXEg+7isNC7uJkgo1iQWOfRSP9NR11RtbZZ3OMG/VhL6jvx+J1m87+RCfJChAtEBQkSBX2PnSiihc/Twh3j0h7qdYQAoRVsRGmq7HU2QRbaxVGa1D6nIOqaIWRjyRZpHMQKWKpZM5feA+lzC4ZFultV8S6T0mzQGhQohi5I8iw+CsqBSxhFMuwyLgSwbghGb0AiIKkSDmGZVmJSiKihsiyOAUs70UkywooYP0bii9GdH4sfr1UNysd3fUyLLMQN+rsmo3grHl9VNJHbbwxoa47Vw5gupIqrZcjPh9R4Nye3nRDk199V+aetmvVtDRE8/+cbgAAgMIWGb3UA0MGLE9SCbWX670TDy1y98c3D27eppUjsZ6fql3jcd5rUe7+ZIlLNQny3Rd+E5Tct3WVhTM5RBCEdiEK0b6B+/ca2gYU393nFj/n1AygRQxPIUA043M42u85+z2SnssKrPl8Mx76NL3E6eXc3be7OD+H4WHbJkKI8AU8irbITQjZ+0hQcPEgId/Fn/pl9crKH02+5o2b9T/eMx7pKoskYgAAAABJRU5ErkJggg==`
|
||||||
Name string
|
|
||||||
V1 int64
|
func gopherPNG() io.Reader { return base64.NewDecoder(base64.StdEncoding, strings.NewReader(gopher)) }
|
||||||
V2 int32
|
|
||||||
V3 int
|
func TestTransform(t *testing.T) {
|
||||||
V4 uint64
|
c := qt.New(t)
|
||||||
|
|
||||||
|
createTransformer := func(spec *Spec, filename, content string) Transformer {
|
||||||
|
filename = filepath.FromSlash(filename)
|
||||||
|
fs := spec.Fs.Source
|
||||||
|
afero.WriteFile(fs, filename, []byte(content), 0777)
|
||||||
|
r, _ := spec.New(ResourceSourceDescriptor{Fs: fs, SourceFilename: filename})
|
||||||
|
return r.(Transformer)
|
||||||
|
}
|
||||||
|
|
||||||
|
createContentReplacer := func(name, old, new string) ResourceTransformation {
|
||||||
|
return &testTransformation{
|
||||||
|
name: name,
|
||||||
|
transform: func(ctx *ResourceTransformationCtx) error {
|
||||||
|
in := helpers.ReaderToString(ctx.From)
|
||||||
|
in = strings.Replace(in, old, new, 1)
|
||||||
|
ctx.AddOutPathIdentifier("." + name)
|
||||||
|
fmt.Fprint(ctx.To, in)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that we publish the same file once only.
|
||||||
|
assertNoDuplicateWrites := func(c *qt.C, spec *Spec) {
|
||||||
|
c.Helper()
|
||||||
|
d := spec.Fs.Destination.(hugofs.DuplicatesReporter)
|
||||||
|
c.Assert(d.ReportDuplicates(), qt.Equals, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
assertShouldExist := func(c *qt.C, spec *Spec, filename string, should bool) {
|
||||||
|
c.Helper()
|
||||||
|
exists, _ := helpers.Exists(filepath.FromSlash(filename), spec.Fs.Destination)
|
||||||
|
c.Assert(exists, qt.Equals, should)
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Run("All values", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
transformation := &testTransformation{
|
||||||
|
name: "test",
|
||||||
|
transform: func(ctx *ResourceTransformationCtx) error {
|
||||||
|
// Content
|
||||||
|
in := helpers.ReaderToString(ctx.From)
|
||||||
|
in = strings.Replace(in, "blue", "green", 1)
|
||||||
|
fmt.Fprint(ctx.To, in)
|
||||||
|
|
||||||
|
// Media type
|
||||||
|
ctx.OutMediaType = media.CSVType
|
||||||
|
|
||||||
|
// Change target
|
||||||
|
ctx.ReplaceOutPathExtension(".csv")
|
||||||
|
|
||||||
|
// Add some data to context
|
||||||
|
ctx.Data["mydata"] = "Hugo Rocks!"
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
|
||||||
|
tr, err := r.Transform(transformation)
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
|
c.Assert(content, qt.Equals, "color is green")
|
||||||
|
c.Assert(tr.MediaType(), eq, media.CSVType)
|
||||||
|
c.Assert(tr.RelPermalink(), qt.Equals, "/f1.csv")
|
||||||
|
assertShouldExist(c, spec, "public/f1.csv", true)
|
||||||
|
|
||||||
|
data := tr.Data().(map[string]interface{})
|
||||||
|
c.Assert(data["mydata"], qt.Equals, "Hugo Rocks!")
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Meta only", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
transformation := &testTransformation{
|
||||||
|
name: "test",
|
||||||
|
transform: func(ctx *ResourceTransformationCtx) error {
|
||||||
|
// Change media type only
|
||||||
|
ctx.OutMediaType = media.CSVType
|
||||||
|
ctx.ReplaceOutPathExtension(".csv")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
|
||||||
|
tr, err := r.Transform(transformation)
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
|
c.Assert(content, qt.Equals, "color is blue")
|
||||||
|
c.Assert(tr.MediaType(), eq, media.CSVType)
|
||||||
|
|
||||||
|
// The transformed file should only be published if RelPermalink
|
||||||
|
// or Permalink is called.
|
||||||
|
n := htesting.RandIntn(3)
|
||||||
|
shouldExist := true
|
||||||
|
switch n {
|
||||||
|
case 0:
|
||||||
|
tr.RelPermalink()
|
||||||
|
case 1:
|
||||||
|
tr.Permalink()
|
||||||
|
default:
|
||||||
|
shouldExist = false
|
||||||
|
}
|
||||||
|
|
||||||
|
assertShouldExist(c, spec, "public/f1.csv", shouldExist)
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Memory-cached transformation", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
// Two transformations with same id, different behaviour.
|
||||||
|
t1 := createContentReplacer("t1", "blue", "green")
|
||||||
|
t2 := createContentReplacer("t1", "color", "car")
|
||||||
|
|
||||||
|
for i, transformation := range []ResourceTransformation{t1, t2} {
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
tr, _ := r.Transform(transformation)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(content, qt.Equals, "color is green", qt.Commentf("i=%d", i))
|
||||||
|
|
||||||
|
assertShouldExist(c, spec, "public/f1.t1.txt", false)
|
||||||
|
}
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("File-cached transformation", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
fs := afero.NewMemMapFs()
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c, fs: fs})
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
|
||||||
|
var transformation ResourceTransformation
|
||||||
|
|
||||||
|
if i == 0 {
|
||||||
|
// There is currently a hardcoded list of transformations that we
|
||||||
|
// persist to disk (tocss, postcss).
|
||||||
|
transformation = &testTransformation{
|
||||||
|
name: "tocss",
|
||||||
|
transform: func(ctx *ResourceTransformationCtx) error {
|
||||||
|
in := helpers.ReaderToString(ctx.From)
|
||||||
|
in = strings.Replace(in, "blue", "green", 1)
|
||||||
|
ctx.AddOutPathIdentifier("." + "cached")
|
||||||
|
ctx.OutMediaType = media.CSVType
|
||||||
|
ctx.Data = map[string]interface{}{
|
||||||
|
"Hugo": "Rocks!",
|
||||||
|
}
|
||||||
|
fmt.Fprint(ctx.To, in)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Force read from file cache.
|
||||||
|
transformation = &testTransformation{
|
||||||
|
name: "tocss",
|
||||||
|
transform: func(ctx *ResourceTransformationCtx) error {
|
||||||
|
return herrors.ErrFeatureNotAvailable
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
msg := qt.Commentf("i=%d", i)
|
||||||
|
|
||||||
|
tr, _ := r.Transform(transformation)
|
||||||
|
c.Assert(tr.RelPermalink(), qt.Equals, "/f1.cached.txt", msg)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(content, qt.Equals, "color is green", msg)
|
||||||
|
c.Assert(tr.MediaType(), eq, media.CSVType)
|
||||||
|
c.Assert(tr.Data(), qt.DeepEquals, map[string]interface{}{
|
||||||
|
"Hugo": "Rocks!",
|
||||||
|
})
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
assertShouldExist(c, spec, "public/f1.cached.txt", true)
|
||||||
|
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Access RelPermalink first", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
t1 := createContentReplacer("t1", "blue", "green")
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
|
||||||
|
tr, _ := r.Transform(t1)
|
||||||
|
|
||||||
|
relPermalink := tr.RelPermalink()
|
||||||
|
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
|
c.Assert(relPermalink, qt.Equals, "/f1.t1.txt")
|
||||||
|
c.Assert(content, qt.Equals, "color is green")
|
||||||
|
c.Assert(tr.MediaType(), eq, media.TextType)
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
assertShouldExist(c, spec, "public/f1.t1.txt", true)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Content two", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
t1 := createContentReplacer("t1", "blue", "green")
|
||||||
|
t2 := createContentReplacer("t1", "color", "car")
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
|
||||||
|
tr, _ := r.Transform(t1, t2)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
|
c.Assert(content, qt.Equals, "car is green")
|
||||||
|
c.Assert(tr.MediaType(), eq, media.TextType)
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Content two chained", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
t1 := createContentReplacer("t1", "blue", "green")
|
||||||
|
t2 := createContentReplacer("t2", "color", "car")
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", "color is blue")
|
||||||
|
|
||||||
|
tr1, _ := r.Transform(t1)
|
||||||
|
tr2, _ := tr1.Transform(t2)
|
||||||
|
|
||||||
|
content1, err := tr1.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
content2, err := tr2.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
|
c.Assert(content1, qt.Equals, "color is green")
|
||||||
|
c.Assert(content2, qt.Equals, "car is green")
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Content many", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
const count = 26 // A-Z
|
||||||
|
|
||||||
|
transformations := make([]ResourceTransformation, count)
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
transformations[i] = createContentReplacer(fmt.Sprintf("t%d", i), fmt.Sprint(i), string(i+65))
|
||||||
|
}
|
||||||
|
|
||||||
|
var countstr strings.Builder
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
countstr.WriteString(fmt.Sprint(i))
|
||||||
|
}
|
||||||
|
|
||||||
|
r := createTransformer(spec, "f1.txt", countstr.String())
|
||||||
|
|
||||||
|
tr, _ := r.Transform(transformations...)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
|
||||||
|
c.Assert(content, qt.Equals, "ABCDEFGHIJKLMNOPQRSTUVWXYZ")
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
|
|
||||||
|
c.Run("Image", func(c *qt.C) {
|
||||||
|
c.Parallel()
|
||||||
|
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
transformation := &testTransformation{
|
||||||
|
name: "test",
|
||||||
|
transform: func(ctx *ResourceTransformationCtx) error {
|
||||||
|
ctx.AddOutPathIdentifier(".changed")
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
r := createTransformer(spec, "gopher.png", helpers.ReaderToString(gopherPNG()))
|
||||||
|
|
||||||
|
tr, err := r.Transform(transformation)
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(tr.MediaType(), eq, media.PNGType)
|
||||||
|
|
||||||
|
img, ok := tr.(resource.Image)
|
||||||
|
c.Assert(ok, qt.Equals, true)
|
||||||
|
|
||||||
|
c.Assert(img.Width(), qt.Equals, 75)
|
||||||
|
c.Assert(img.Height(), qt.Equals, 60)
|
||||||
|
|
||||||
|
// RelPermalink called.
|
||||||
|
resizedPublished1, err := img.Resize("40x40")
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(resizedPublished1.Height(), qt.Equals, 40)
|
||||||
|
c.Assert(resizedPublished1.RelPermalink(), qt.Equals, "/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_40x40_resize_linear_2.png")
|
||||||
|
assertShouldExist(c, spec, "public/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_40x40_resize_linear_2.png", true)
|
||||||
|
|
||||||
|
// Permalink called.
|
||||||
|
resizedPublished2, err := img.Resize("30x30")
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(resizedPublished2.Height(), qt.Equals, 30)
|
||||||
|
c.Assert(resizedPublished2.Permalink(), qt.Equals, "https://example.com/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_30x30_resize_linear_2.png")
|
||||||
|
assertShouldExist(c, spec, "public/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_30x30_resize_linear_2.png", true)
|
||||||
|
|
||||||
|
// Not published because none of RelPermalink or Permalink was called.
|
||||||
|
resizedNotPublished, err := img.Resize("50x50")
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(resizedNotPublished.Height(), qt.Equals, 50)
|
||||||
|
//c.Assert(resized.RelPermalink(), qt.Equals, "/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_50x50_resize_linear_2.png")
|
||||||
|
assertShouldExist(c, spec, "public/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_50x50_resize_linear_2.png", false)
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
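	// Note (illustrative, not part of the original test): the Resize calls
	// above show that publishing is deferred until a URL is requested. A
	// minimal sketch of that behaviour, assuming an img value like the one in
	// the test:
	//
	//	resized, err := img.Resize("50x50") // processed, but nothing written yet
	//	if err == nil {
	//		_ = resized.Height()       // metadata only; still nothing under public/
	//		_ = resized.RelPermalink() // first URL access triggers the write to public/
	//	}
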
c.Run("Concurrent", func(c *qt.C) {
|
||||||
|
spec := newTestResourceSpec(specDescriptor{c: c})
|
||||||
|
|
||||||
|
transformers := make([]Transformer, 10)
|
||||||
|
transformations := make([]ResourceTransformation, 10)
|
||||||
|
|
||||||
|
for i := 0; i < 10; i++ {
|
||||||
|
transformers[i] = createTransformer(spec, fmt.Sprintf("f%d.txt", i), fmt.Sprintf("color is %d", i))
|
||||||
|
transformations[i] = createContentReplacer("test", strconv.Itoa(i), "blue")
|
||||||
|
}
|
||||||
|
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
|
||||||
|
for i := 0; i < 13; i++ {
|
||||||
|
wg.Add(1)
|
||||||
|
go func(i int) {
|
||||||
|
defer wg.Done()
|
||||||
|
for j := 0; j < 23; j++ {
|
||||||
|
id := (i + j) % 10
|
||||||
|
tr, err := transformers[id].Transform(transformations[id])
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
content, err := tr.(resource.ContentProvider).Content()
|
||||||
|
c.Assert(err, qt.IsNil)
|
||||||
|
c.Assert(content, qt.Equals, "color is blue")
|
||||||
|
c.Assert(tr.RelPermalink(), qt.Equals, fmt.Sprintf("/f%d.test.txt", id))
|
||||||
|
}
|
||||||
|
}(i)
|
||||||
|
}
|
||||||
|
wg.Wait()
|
||||||
|
|
||||||
|
assertNoDuplicateWrites(c, spec)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestResourceTransformationKey(t *testing.T) {
	// We really need this key to be portable across OSes.
	key := NewResourceTransformationKey("testing",
		testStruct{Name: "test", V1: int64(10), V2: int32(20), V3: 30, V4: uint64(40)})
	c := qt.New(t)
	c.Assert("testing_518996646957295636", qt.Equals, key.key())
}

type testTransformation struct {
	name      string
	transform func(ctx *ResourceTransformationCtx) error
}

func (t *testTransformation) Key() internal.ResourceTransformationKey {
	return internal.NewResourceTransformationKey(t.name)
}

func (t *testTransformation) Transform(ctx *ResourceTransformationCtx) error {
	return t.transform(ctx)
}
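
// Note (illustrative): testTransformation above is a minimal implementer of
// the transformation interface exercised throughout these tests. Its assumed
// shape, a stable cache key plus the transformation itself, is roughly:
//
//	type ResourceTransformation interface {
//		Key() internal.ResourceTransformationKey
//		Transform(ctx *ResourceTransformationCtx) error
//	}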

@@ -22,7 +22,9 @@ import (
 	_errors "github.com/pkg/errors"
 
 	"github.com/gohugoio/hugo/deps"
+	"github.com/gohugoio/hugo/resources"
 	"github.com/gohugoio/hugo/resources/resource"
+
 	"github.com/gohugoio/hugo/resources/resource_factories/bundler"
 	"github.com/gohugoio/hugo/resources/resource_factories/create"
 	"github.com/gohugoio/hugo/resources/resource_transformers/integrity"
@@ -174,7 +176,7 @@ func (ns *Namespace) ExecuteAsTemplate(args ...interface{}) (resource.Resource,
 	}
 	data := args[1]
 
-	r, ok := args[2].(resource.Resource)
+	r, ok := args[2].(resources.ResourceTransformer)
 	if !ok {
 		return nil, fmt.Errorf("type %T not supported in Resource transformations", args[2])
 	}
@@ -201,9 +203,9 @@ func (ns *Namespace) Fingerprint(args ...interface{}) (resource.Resource, error)
 		}
 	}
 
-	r, ok := args[resIdx].(resource.Resource)
+	r, ok := args[resIdx].(resources.ResourceTransformer)
 	if !ok {
-		return nil, fmt.Errorf("%T is not a Resource", args[resIdx])
+		return nil, fmt.Errorf("%T can not be transformed", args[resIdx])
 	}
 
 	return ns.integrityClient.Fingerprint(r, algo)
@@ -211,7 +213,7 @@ func (ns *Namespace) Fingerprint(args ...interface{}) (resource.Resource, error)
 
 // Minify minifies the given Resource using the MediaType to pick the correct
 // minifier.
-func (ns *Namespace) Minify(r resource.Resource) (resource.Resource, error) {
+func (ns *Namespace) Minify(r resources.ResourceTransformer) (resource.Resource, error) {
 	return ns.minifyClient.Minify(r)
 }
 
@@ -219,7 +221,7 @@ func (ns *Namespace) Minify(r resource.Resource) (resource.Resource, error) {
 // object or a target path (string) as first argument.
 func (ns *Namespace) ToCSS(args ...interface{}) (resource.Resource, error) {
 	var (
-		r          resource.Resource
+		r          resources.ResourceTransformer
 		m          map[string]interface{}
 		targetPath string
 		err        error
@@ -266,7 +268,7 @@ func (ns *Namespace) PostCSS(args ...interface{}) (resource.Resource, error) {
 }
 
 // We allow string or a map as the first argument in some cases.
-func (ns *Namespace) resolveIfFirstArgIsString(args []interface{}) (resource.Resource, string, bool) {
+func (ns *Namespace) resolveIfFirstArgIsString(args []interface{}) (resources.ResourceTransformer, string, bool) {
 	if len(args) != 2 {
 		return nil, "", false
 	}
@@ -275,26 +277,26 @@ func (ns *Namespace) resolveIfFirstArgIsString(args []interface{}) (resource.Res
 	if !ok1 {
 		return nil, "", false
 	}
-	v2, ok2 := args[1].(resource.Resource)
+	v2, ok2 := args[1].(resources.ResourceTransformer)
 
 	return v2, v1, ok2
 }
 
 // This roundabout way of doing it is needed to get both pipeline behaviour and options as arguments.
-func (ns *Namespace) resolveArgs(args []interface{}) (resource.Resource, map[string]interface{}, error) {
+func (ns *Namespace) resolveArgs(args []interface{}) (resources.ResourceTransformer, map[string]interface{}, error) {
 	if len(args) == 0 {
 		return nil, nil, errors.New("no Resource provided in transformation")
 	}
 
 	if len(args) == 1 {
-		r, ok := args[0].(resource.Resource)
+		r, ok := args[0].(resources.ResourceTransformer)
 		if !ok {
 			return nil, nil, fmt.Errorf("type %T not supported in Resource transformations", args[0])
 		}
 		return r, nil, nil
 	}
 
-	r, ok := args[1].(resource.Resource)
+	r, ok := args[1].(resources.ResourceTransformer)
 	if !ok {
 		return nil, nil, fmt.Errorf("type %T not supported in Resource transformations", args[0])
 	}
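
The signature changes above narrow these template functions from accepting any resource.Resource to accepting only values that can take part in a transformation chain. A rough sketch of the assumed relationship between the two interfaces (names as used in the diff; the exact definitions live in the resources package):

	// Sketch only, not copied from the source.
	type Transformer interface {
		Transform(t ...ResourceTransformation) (ResourceTransformer, error)
	}

	type ResourceTransformer interface {
		resource.Resource
		Transformer
	}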
|
||||||
|
|
Loading…
Reference in a new issue