Consolidate all hashing to the common/hashing package

And remove now unused hashing funcs.
Bjørn Erik Pedersen 2024-07-30 15:47:34 +02:00
parent d5eda13cb2
commit e67886c038
125 changed files with 177 additions and 368 deletions
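The call-site changes in this diff all follow the same substitutions: identity.HashString and identity.HashUint64 become hashing.HashString and hashing.HashUint64, helpers.MD5String becomes hashing.MD5FromStringHexEncoded, and the image test swaps helpers.MD5FromReader for hashing.XXHashFromReader. A minimal sketch of the consolidated API (the expected values for the first two calls are the ones asserted in the new tests; the middle return value of XXHashFromReader is discarded, as in the updated image test further down):

package main

import (
	"fmt"
	"strings"

	"github.com/gohugoio/hugo/common/hashing"
)

func main() {
	// Identity hashes, previously identity.HashString / identity.HashUint64.
	fmt.Println(hashing.HashString("a", "b")) // "3176555414984061461"
	fmt.Println(hashing.HashUint64("a", "b")) // 3176555414984061461

	// MD5 hex string, previously helpers.MD5String.
	fmt.Println(hashing.MD5FromStringHexEncoded("abc"))

	// xxHash of a reader, used in place of helpers.MD5FromReader in the image tests.
	sum, _, err := hashing.XXHashFromReader(strings.NewReader("abc"))
	if err != nil {
		panic(err)
	}
	fmt.Println(sum)
}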

View file

@ -15,11 +15,15 @@
package hashing
import (
"crypto/md5"
"encoding/hex"
"io"
"strconv"
"sync"
"github.com/cespare/xxhash/v2"
"github.com/gohugoio/hashstructure"
"github.com/gohugoio/hugo/identity"
)
// XXHashFromReader calculates the xxHash for the given reader.
@ -50,6 +54,82 @@ func XxHashFromStringHexEncoded(f string) string {
return hex.EncodeToString(hash)
}
// MD5FromStringHexEncoded returns the MD5 hash of the given string.
func MD5FromStringHexEncoded(f string) string {
h := md5.New()
h.Write([]byte(f))
return hex.EncodeToString(h.Sum(nil))
}
// HashString returns a hash from the given elements.
// It will panic if the hash cannot be calculated.
// Note that this hash should be used primarily for identity, not for change detection,
// as it will not hash the full content of more complex values (e.g. Page).
func HashString(vs ...any) string {
hash := HashUint64(vs...)
return strconv.FormatUint(hash, 10)
}
var hashOptsPool = sync.Pool{
New: func() any {
return &hashstructure.HashOptions{
Hasher: xxhash.New(),
}
},
}
func getHashOpts() *hashstructure.HashOptions {
return hashOptsPool.Get().(*hashstructure.HashOptions)
}
func putHashOpts(opts *hashstructure.HashOptions) {
opts.Hasher.Reset()
hashOptsPool.Put(opts)
}
// HashUint64 returns a hash from the given elements.
// It will panic if the hash cannot be calculated.
// Note that this hash should be used primarily for identity, not for change detection,
// as it will not hash the full content of more complex values (e.g. Page).
func HashUint64(vs ...any) uint64 {
var o any
if len(vs) == 1 {
o = toHashable(vs[0])
} else {
elements := make([]any, len(vs))
for i, e := range vs {
elements[i] = toHashable(e)
}
o = elements
}
hashOpts := getHashOpts()
defer putHashOpts(hashOpts)
hash, err := hashstructure.Hash(o, hashOpts)
if err != nil {
panic(err)
}
return hash
}
type keyer interface {
Key() string
}
// For structs, hashstructure.Hash only works on the exported fields,
// so rewrite the input slice for known identity types.
func toHashable(v any) any {
switch t := v.(type) {
case keyer:
return t.Key()
case identity.IdentityProvider:
return t.GetIdentity()
default:
return v
}
}
type xxhashReadFrom struct {
buff []byte
*xxhash.Digest
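The toHashable rewrite above is what keeps these identity hashes stable: hashstructure.Hash only sees exported fields, so known identity types are reduced to their Key() or GetIdentity() value first. A small sketch under that assumption, using a hypothetical cacheEntry type that is not part of the diff:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hashing"
)

// cacheEntry is a hypothetical example type: its unexported payload would be
// invisible to hashstructure, but Key() gives it a stable identity.
type cacheEntry struct {
	id      string
	payload []byte
}

func (e cacheEntry) Key() string { return e.id }

func main() {
	a := cacheEntry{id: "home", payload: []byte("v1")}
	b := cacheEntry{id: "home", payload: []byte("v2")}

	// Both values are hashed via Key(), so they share the same hash even
	// though the payloads differ: identity, not change detection.
	fmt.Println(hashing.HashString(a) == hashing.HashString(b)) // true
}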

View file

@ -14,10 +14,11 @@
package hashing
import (
"fmt"
"math"
"strings"
"testing"
"github.com/cespare/xxhash/v2"
qt "github.com/frankban/quicktest"
)
@ -72,8 +73,47 @@ func BenchmarkXXHashFromStringHexEncoded(b *testing.B) {
}
}
func xxHashFromString(f string) uint64 {
h := xxhash.New()
h.WriteString(f)
return h.Sum64()
func TestHashString(t *testing.T) {
c := qt.New(t)
c.Assert(HashString("a", "b"), qt.Equals, "3176555414984061461")
c.Assert(HashString("ab"), qt.Equals, "7347350983217793633")
var vals []any = []any{"a", "b", tstKeyer{"c"}}
c.Assert(HashString(vals...), qt.Equals, "4438730547989914315")
c.Assert(vals[2], qt.Equals, tstKeyer{"c"})
}
type tstKeyer struct {
key string
}
func (t tstKeyer) Key() string {
return t.key
}
func (t tstKeyer) String() string {
return "key: " + t.key
}
func BenchmarkHashString(b *testing.B) {
word := " hello "
var tests []string
for i := 1; i <= 5; i++ {
sentence := strings.Repeat(word, int(math.Pow(4, float64(i))))
tests = append(tests, sentence)
}
b.ResetTimer()
for _, test := range tests {
b.Run(fmt.Sprintf("n%d", len(test)), func(b *testing.B) {
for i := 0; i < b.N; i++ {
HashString(test)
}
})
}
}

View file

@ -21,7 +21,7 @@ import (
"sync"
"github.com/bep/logg"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/common/hashing"
)
// PanicOnWarningHook panics on warnings.
@ -85,7 +85,7 @@ func (h *logOnceHandler) HandleLog(e *logg.Entry) error {
}
h.mu.Lock()
defer h.mu.Unlock()
hash := identity.HashUint64(e.Level, e.Message, e.Fields)
hash := hashing.HashUint64(e.Level, e.Message, e.Fields)
if h.seen[hash] {
return errStop
}
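The pattern here is content-based deduplication: hash every part of the log entry and only handle combinations not seen before. A simplified, non-concurrent sketch of the same idea (the real handler guards its seen map with the mutex shown above):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hashing"
)

// once tracks combinations of values that have already been handled.
type once struct {
	seen map[uint64]bool
}

// firstTime reports whether this combination of values is new, and records it.
func (o *once) firstTime(vs ...any) bool {
	h := hashing.HashUint64(vs...)
	if o.seen[h] {
		return false
	}
	o.seen[h] = true
	return true
}

func main() {
	o := &once{seen: map[uint64]bool{}}
	fmt.Println(o.firstTime("warning", "deprecated: foo")) // true
	fmt.Println(o.firstTime("warning", "deprecated: foo")) // false
}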

View file

@ -16,13 +16,13 @@ package config
import (
"encoding/json"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/common/hashing"
)
func DecodeNamespace[S, C any](configSource any, buildConfig func(any) (C, any, error)) (*ConfigNamespace[S, C], error) {
// Calculate the hash of the input (not including any defaults applied later).
// This allows us to introduce new config options without breaking the hash.
h := identity.HashString(configSource)
h := hashing.HashString(configSource)
// Build the config
c, ext, err := buildConfig(configSource)

View file

@ -15,8 +15,6 @@ package helpers
import (
"bytes"
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"net"
@ -257,66 +255,6 @@ func SliceToLower(s []string) []string {
return l
}
// XXHashFromReader creates a xxHash hash from the given reader.
// MD5String takes a string and returns its MD5 hash.
func MD5String(f string) string {
h := md5.New()
h.Write([]byte(f))
return hex.EncodeToString(h.Sum([]byte{}))
}
// MD5FromReaderFast creates a MD5 hash from the given file. It only reads parts of
// the file for speed, so don't use it if the files are very subtly different.
// It will not close the file.
// It will return the MD5 hash and the size of r in bytes.
func MD5FromReaderFast(r io.ReadSeeker) (string, int64, error) {
const (
// Do not change once set in stone!
maxChunks = 8
peekSize = 64
seek = 2048
)
h := md5.New()
buff := make([]byte, peekSize)
for i := 0; i < maxChunks; i++ {
if i > 0 {
_, err := r.Seek(seek, 0)
if err != nil {
if err == io.EOF {
break
}
return "", 0, err
}
}
_, err := io.ReadAtLeast(r, buff, peekSize)
if err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
h.Write(buff)
break
}
return "", 0, err
}
h.Write(buff)
}
size, _ := r.Seek(0, io.SeekEnd)
return hex.EncodeToString(h.Sum(nil)), size, nil
}
// MD5FromReader creates a MD5 hash from the given reader.
func MD5FromReader(r io.Reader) (string, error) {
h := md5.New()
if _, err := io.Copy(h, r); err != nil {
return "", nil
}
return hex.EncodeToString(h.Sum(nil)), nil
}
// IsWhitespace determines if the given rune is whitespace.
func IsWhitespace(r rune) bool {
return r == ' ' || r == '\t' || r == '\n' || r == '\r'

View file

@ -14,7 +14,6 @@
package helpers_test
import (
"fmt"
"reflect"
"strings"
"testing"
@ -22,7 +21,6 @@ import (
"github.com/gohugoio/hugo/helpers"
qt "github.com/frankban/quicktest"
"github.com/spf13/afero"
)
func TestResolveMarkup(t *testing.T) {
@ -256,93 +254,6 @@ func TestUniqueStringsSorted(t *testing.T) {
c.Assert(helpers.UniqueStringsSorted(nil), qt.IsNil)
}
func TestFastMD5FromFile(t *testing.T) {
fs := afero.NewMemMapFs()
if err := afero.WriteFile(fs, "small.txt", []byte("abc"), 0o777); err != nil {
t.Fatal(err)
}
if err := afero.WriteFile(fs, "small2.txt", []byte("abd"), 0o777); err != nil {
t.Fatal(err)
}
if err := afero.WriteFile(fs, "bigger.txt", []byte(strings.Repeat("a bc d e", 100)), 0o777); err != nil {
t.Fatal(err)
}
if err := afero.WriteFile(fs, "bigger2.txt", []byte(strings.Repeat("c d e f g", 100)), 0o777); err != nil {
t.Fatal(err)
}
c := qt.New(t)
sf1, err := fs.Open("small.txt")
c.Assert(err, qt.IsNil)
sf2, err := fs.Open("small2.txt")
c.Assert(err, qt.IsNil)
bf1, err := fs.Open("bigger.txt")
c.Assert(err, qt.IsNil)
bf2, err := fs.Open("bigger2.txt")
c.Assert(err, qt.IsNil)
defer sf1.Close()
defer sf2.Close()
defer bf1.Close()
defer bf2.Close()
m1, _, err := helpers.MD5FromReaderFast(sf1)
c.Assert(err, qt.IsNil)
c.Assert(m1, qt.Equals, "e9c8989b64b71a88b4efb66ad05eea96")
m2, _, err := helpers.MD5FromReaderFast(sf2)
c.Assert(err, qt.IsNil)
c.Assert(m2, qt.Not(qt.Equals), m1)
m3, _, err := helpers.MD5FromReaderFast(bf1)
c.Assert(err, qt.IsNil)
c.Assert(m3, qt.Not(qt.Equals), m2)
m4, _, err := helpers.MD5FromReaderFast(bf2)
c.Assert(err, qt.IsNil)
c.Assert(m4, qt.Not(qt.Equals), m3)
m5, err := helpers.MD5FromReader(bf2)
c.Assert(err, qt.IsNil)
c.Assert(m5, qt.Not(qt.Equals), m4)
}
func BenchmarkMD5FromFileFast(b *testing.B) {
fs := afero.NewMemMapFs()
for _, full := range []bool{false, true} {
b.Run(fmt.Sprintf("full=%t", full), func(b *testing.B) {
for i := 0; i < b.N; i++ {
b.StopTimer()
if err := afero.WriteFile(fs, "file.txt", []byte(strings.Repeat("1234567890", 2000)), 0o777); err != nil {
b.Fatal(err)
}
f, err := fs.Open("file.txt")
if err != nil {
b.Fatal(err)
}
b.StartTimer()
if full {
if _, err := helpers.MD5FromReader(f); err != nil {
b.Fatal(err)
}
} else {
if _, _, err := helpers.MD5FromReaderFast(f); err != nil {
b.Fatal(err)
}
}
f.Close()
}
})
}
}
func BenchmarkUniqueStrings(b *testing.B) {
input := []string{"a", "b", "d", "e", "d", "h", "a", "i"}

View file

@ -22,7 +22,6 @@ import (
"time"
"github.com/gobuffalo/flect"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/markup/converter"
xmaps "golang.org/x/exp/maps"
@ -32,6 +31,7 @@ import (
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/common/constants"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
@ -328,7 +328,7 @@ func (ps *pageState) setMetaPost(cascade map[page.PageMatcher]maps.Params) error
ps.m.setMetaPostCount++
var cascadeHashPre uint64
if ps.m.setMetaPostCount > 1 {
cascadeHashPre = identity.HashUint64(ps.m.pageConfig.CascadeCompiled)
cascadeHashPre = hashing.HashUint64(ps.m.pageConfig.CascadeCompiled)
ps.m.pageConfig.CascadeCompiled = xmaps.Clone[map[page.PageMatcher]maps.Params](ps.m.cascadeOriginal)
}
@ -360,7 +360,7 @@ func (ps *pageState) setMetaPost(cascade map[page.PageMatcher]maps.Params) error
}
if ps.m.setMetaPostCount > 1 {
ps.m.setMetaPostCascadeChanged = cascadeHashPre != identity.HashUint64(ps.m.pageConfig.CascadeCompiled)
ps.m.setMetaPostCascadeChanged = cascadeHashPre != hashing.HashUint64(ps.m.pageConfig.CascadeCompiled)
if !ps.m.setMetaPostCascadeChanged {
// No changes, restore any value that may be changed by aggregation.

View file

@ -23,13 +23,13 @@ import (
"time"
"github.com/bep/clocks"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/markup/asciidocext"
"github.com/gohugoio/hugo/markup/rst"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/htime"
"github.com/gohugoio/hugo/common/loggers"
@ -2040,8 +2040,8 @@ title: "p2"
b.Assert(p1, qt.Not(qt.Equals), p2)
b.Assert(identity.HashString(p1), qt.Not(qt.Equals), identity.HashString(p2))
b.Assert(identity.HashString(sites[0]), qt.Not(qt.Equals), identity.HashString(sites[1]))
b.Assert(hashing.HashString(p1), qt.Not(qt.Equals), hashing.HashString(p2))
b.Assert(hashing.HashString(sites[0]), qt.Not(qt.Equals), hashing.HashString(sites[1]))
}
// Issue #11243

View file

@ -19,12 +19,11 @@ import (
"path/filepath"
"testing"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/kinds"
@ -701,13 +700,13 @@ bundle min min key: {{ $jsonMinMin.Key }}
b.AssertFileContent(index, fmt.Sprintf("data content unmarshaled: v%d", i))
b.AssertFileContent(index, fmt.Sprintf("data assets content unmarshaled: v%d", i))
md5Asset := helpers.MD5String(fmt.Sprintf(`vdata: v%d`, i))
md5Asset := hashing.MD5FromStringHexEncoded(fmt.Sprintf(`vdata: v%d`, i))
b.AssertFileContent(index, fmt.Sprintf("assets fingerprinted: /data%d/data.%s.yaml", i, md5Asset))
// The original is not used, make sure it's not published.
b.Assert(b.CheckExists(fmt.Sprintf("public/data%d/data.yaml", i)), qt.Equals, false)
md5Bundle := helpers.MD5String(fmt.Sprintf(`data: v%d`, i))
md5Bundle := hashing.MD5FromStringHexEncoded(fmt.Sprintf(`data: v%d`, i))
b.AssertFileContent(index, fmt.Sprintf("bundle fingerprinted: /bundle%d/data.%s.yaml", i, md5Bundle))
b.AssertFileContent(index,

View file

@ -19,6 +19,7 @@ import (
"io"
"path/filepath"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/helpers"
@ -219,7 +220,7 @@ type BuildState struct {
}
func (b *BuildState) hash(v any) uint64 {
return identity.HashUint64(v)
return hashing.HashUint64(v)
}
func (b *BuildState) checkHasChangedAndSetSourceInfo(changedPath string, v any) bool {

View file

@ -27,8 +27,8 @@ import (
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss"
)
@ -119,7 +119,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: |failed to fetch remote resource: Not Imple
|StatusCode: 501|ContentLength: 16|ContentType: text/plain; charset=utf-8|
`, identity.HashString(ts.URL+"/sunset.jpg", map[string]any{})))
`, hashing.HashString(ts.URL+"/sunset.jpg", map[string]any{})))
b.AssertFileContent("public/styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css", "body{background-color:#add8e6}")
b.AssertFileContent("public//styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css", "body{background-color:orange}")

View file

@ -1,91 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package identity
import (
"strconv"
"sync"
"github.com/cespare/xxhash/v2"
"github.com/gohugoio/hashstructure"
)
// HashString returns a hash from the given elements.
// It will panic if the hash cannot be calculated.
// Note that this hash should be used primarily for identity, not for change detection as
// it in the more complex values (e.g. Page) will not hash the full content.
func HashString(vs ...any) string {
hash := HashUint64(vs...)
return strconv.FormatUint(hash, 10)
}
var hashOptsPool = sync.Pool{
New: func() any {
return &hashstructure.HashOptions{
Hasher: xxhash.New(),
}
},
}
func getHashOpts() *hashstructure.HashOptions {
return hashOptsPool.Get().(*hashstructure.HashOptions)
}
func putHashOpts(opts *hashstructure.HashOptions) {
opts.Hasher.Reset()
hashOptsPool.Put(opts)
}
// HashUint64 returns a hash from the given elements.
// It will panic if the hash cannot be calculated.
// Note that this hash should be used primarily for identity, not for change detection as
// it in the more complex values (e.g. Page) will not hash the full content.
func HashUint64(vs ...any) uint64 {
var o any
if len(vs) == 1 {
o = toHashable(vs[0])
} else {
elements := make([]any, len(vs))
for i, e := range vs {
elements[i] = toHashable(e)
}
o = elements
}
hashOpts := getHashOpts()
defer putHashOpts(hashOpts)
hash, err := hashstructure.Hash(o, hashOpts)
if err != nil {
panic(err)
}
return hash
}
type keyer interface {
Key() string
}
// For structs, hashstructure.Hash only works on the exported fields,
// so rewrite the input slice for known identity types.
func toHashable(v any) any {
switch t := v.(type) {
case keyer:
return t.Key()
case IdentityProvider:
return t.GetIdentity()
default:
return v
}
}

View file

@ -1,68 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package identity
import (
"fmt"
"math"
"strings"
"testing"
qt "github.com/frankban/quicktest"
)
func TestHashString(t *testing.T) {
c := qt.New(t)
c.Assert(HashString("a", "b"), qt.Equals, "3176555414984061461")
c.Assert(HashString("ab"), qt.Equals, "7347350983217793633")
var vals []any = []any{"a", "b", tstKeyer{"c"}}
c.Assert(HashString(vals...), qt.Equals, "4438730547989914315")
c.Assert(vals[2], qt.Equals, tstKeyer{"c"})
}
type tstKeyer struct {
key string
}
func (t tstKeyer) Key() string {
return t.key
}
func (t tstKeyer) String() string {
return "key: " + t.key
}
func BenchmarkHashString(b *testing.B) {
word := " hello "
var tests []string
for i := 1; i <= 5; i++ {
sentence := strings.Repeat(word, int(math.Pow(4, float64(i))))
tests = append(tests, sentence)
}
b.ResetTimer()
for _, test := range tests {
b.Run(fmt.Sprintf("n%d", len(test)), func(b *testing.B) {
for i := 0; i < b.N; i++ {
HashString(test)
}
})
}
}

View file

@ -25,9 +25,9 @@ import (
"sync"
"time"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/identity"
)
// The Provider interface defines an interface for measuring metrics.
@ -241,7 +241,7 @@ func howSimilar(a, b any) int {
return 90
}
h1, h2 := identity.HashString(a), identity.HashString(b)
h1, h2 := hashing.HashString(a), hashing.HashString(b)
if h1 == h2 {
return 100
}

View file

@ -29,9 +29,9 @@ import (
color_extractor "github.com/marekm4/color-extractor"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/identity"
"github.com/disintegration/gift"
@ -40,7 +40,6 @@ import (
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources/images"
// Blind import for image.Decode
@ -274,7 +273,7 @@ func (i *imageResource) Filter(filters ...any) (images.ImageResource, error) {
}
conf.Action = "filter"
conf.Key = identity.HashString(gfilters)
conf.Key = hashing.HashString(gfilters)
conf.TargetFormat = targetFormat
if conf.TargetFormat == 0 {
conf.TargetFormat = i.Format
@ -481,7 +480,7 @@ func (i *imageResource) getImageMetaCacheTargetPath() string {
df := i.getResourcePaths()
p1, _ := paths.FileAndExt(df.File)
h := i.hash()
idStr := identity.HashString(h, i.size(), imageMetaVersionNumber, cfgHash)
idStr := hashing.HashString(h, i.size(), imageMetaVersionNumber, cfgHash)
df.File = fmt.Sprintf("%s_%s.json", p1, idStr)
return df.TargetPath()
}
@ -504,7 +503,7 @@ func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) interna
// can easily be too long to read, and maybe even too long
// for the different OSes to handle.
if len(p1)+len(idStr)+len(p2) > md5Threshold {
key = helpers.MD5String(p1 + key + p2)
key = hashing.MD5FromStringHexEncoded(p1 + key + p2)
huIdx := strings.Index(p1, "_hu")
if huIdx != -1 {
p1 = p1[:huIdx]

View file

@ -33,14 +33,13 @@ import (
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/resources/images/webp"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/paths"
"github.com/spf13/afero"
"github.com/disintegration/gift"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/images"
"github.com/google/go-cmp/cmp"
@ -835,9 +834,9 @@ func assetGoldenDirs(c *qt.C, dir1, dir2 string) {
_, err = f2.Seek(0, 0)
c.Assert(err, qt.IsNil)
hash1, err := helpers.MD5FromReader(f1)
hash1, _, err := hashing.XXHashFromReader(f1)
c.Assert(err, qt.IsNil)
hash2, err := helpers.MD5FromReader(f2)
hash2, _, err := hashing.XXHashFromReader(f2)
c.Assert(err, qt.IsNil)
c.Assert(hash1, qt.Equals, hash2)

View file

@ -17,7 +17,7 @@ import (
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/common/hashing"
)
func TestFilterHash(t *testing.T) {
@ -25,8 +25,8 @@ func TestFilterHash(t *testing.T) {
f := &Filters{}
c.Assert(identity.HashString(f.Grayscale()), qt.Equals, identity.HashString(f.Grayscale()))
c.Assert(identity.HashString(f.Grayscale()), qt.Not(qt.Equals), identity.HashString(f.Invert()))
c.Assert(identity.HashString(f.Gamma(32)), qt.Not(qt.Equals), identity.HashString(f.Gamma(33)))
c.Assert(identity.HashString(f.Gamma(32)), qt.Equals, identity.HashString(f.Gamma(32)))
c.Assert(hashing.HashString(f.Grayscale()), qt.Equals, hashing.HashString(f.Grayscale()))
c.Assert(hashing.HashString(f.Grayscale()), qt.Not(qt.Equals), hashing.HashString(f.Invert()))
c.Assert(hashing.HashString(f.Gamma(32)), qt.Not(qt.Equals), hashing.HashString(f.Gamma(33)))
c.Assert(hashing.HashString(f.Gamma(32)), qt.Equals, hashing.HashString(f.Gamma(32)))
}

View file

@ -13,7 +13,7 @@
package internal
import "github.com/gohugoio/hugo/identity"
import "github.com/gohugoio/hugo/common/hashing"
// ResourceTransformationKey are provided by the different transformation implementations.
// It identifies the transformation (name) and its configuration (elements).
@ -38,5 +38,5 @@ func (k ResourceTransformationKey) Value() string {
return k.Name
}
return k.Name + "_" + identity.HashString(k.elements...)
return k.Name + "_" + hashing.HashString(k.elements...)
}
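The effect of Value() is a stable cache key of the form <name>_<hash of the configuration>, with the hash omitted when there is no configuration. A sketch of the same composition using only the hashing package (transformationKeyValue is illustrative, not part of the diff):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hashing"
)

// transformationKeyValue mirrors ResourceTransformationKey.Value above: the
// transformation name, plus a hash of its configuration elements when present.
func transformationKeyValue(name string, elements ...any) string {
	if len(elements) == 0 {
		return name
	}
	return name + "_" + hashing.HashString(elements...)
}

func main() {
	fmt.Println(transformationKeyValue("minify"))                                 // "minify"
	fmt.Println(transformationKeyValue("postcss", map[string]any{"noMap": true})) // "postcss_<hash>"
}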

View file

@ -26,7 +26,6 @@ import (
"github.com/bep/logg"
"github.com/gohugoio/httpcache"
hhttpcache "github.com/gohugoio/hugo/cache/httpcache"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs/glob"
"github.com/gohugoio/hugo/identity"
@ -34,6 +33,7 @@ import (
"github.com/gohugoio/hugo/cache/dynacache"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/hcontext"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/tasks"
@ -226,7 +226,7 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource)
// TODO(bep) see #10912; we currently emit a warning for this config scenario.
func (c *Client) FromString(targetPath, content string) (resource.Resource, error) {
targetPath = path.Clean(targetPath)
key := dynacache.CleanKey(targetPath) + helpers.MD5String(content)
key := dynacache.CleanKey(targetPath) + hashing.MD5FromStringHexEncoded(content)
r, err := c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) {
return c.rs.NewResource(
resources.ResourceSourceDescriptor{

View file

@ -29,6 +29,7 @@ import (
gmaps "maps"
"github.com/gohugoio/httpcache"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
@ -310,10 +311,10 @@ func (c *Client) validateFromRemoteArgs(uri string, options fromRemoteOptions) e
func remoteResourceKeys(uri string, optionsm map[string]any) (string, string) {
var userKey string
if key, k, found := maps.LookupEqualFold(optionsm, "key"); found {
userKey = identity.HashString(key)
userKey = hashing.HashString(key)
delete(optionsm, k)
}
optionsKey := identity.HashString(uri, optionsm)
optionsKey := hashing.HashString(uri, optionsm)
if userKey == "" {
userKey = optionsKey
}
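The hunk above derives two keys for a remote resource: a user key from the optional "key" option, and an options key hashed from the URL plus the remaining options, with the user key falling back to the options key when unset. A simplified sketch of that derivation (the real code uses a case-insensitive option lookup and returns both keys; the URL below is just an example):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hashing"
)

// remoteKey is a simplified version of the derivation above: a user-supplied
// "key" option pins the cache key; otherwise it is the hash of URL + options.
func remoteKey(uri string, options map[string]any) string {
	if key, found := options["key"]; found {
		delete(options, "key")
		return hashing.HashString(key)
	}
	return hashing.HashString(uri, options)
}

func main() {
	fmt.Println(remoteKey("https://example.org/sunset.jpg", map[string]any{}))
	fmt.Println(remoteKey("https://example.org/sunset.jpg", map[string]any{"key": "sunset-v1"}))
}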

Some files were not shown because too many files have changed in this diff.