hugolib: Compare every element in pages cache
It is slightly slower, but correctness is, of course, more important:

```bash
benchmark                            old ns/op     new ns/op     delta
BenchmarkSortByWeightAndReverse-4    367           645           +75.75%

benchmark                            old allocs     new allocs     delta
BenchmarkSortByWeightAndReverse-4    2              2              +0.00%

benchmark                            old bytes     new bytes     delta
BenchmarkSortByWeightAndReverse-4    64             64             +0.00%
```

Running the same benchmark without any cache (i.e. re-sorting the slice on every iteration) and comparing it to the current version shows that the cache is still plenty worth it:

```bash
▶ benchcmp 2.bench 1.bench
benchmark                            old ns/op     new ns/op     delta
BenchmarkSortByWeightAndReverse-4    1358757       645           -99.95%

benchmark                            old allocs     new allocs     delta
BenchmarkSortByWeightAndReverse-4    17159          2             -99.99%

benchmark                            old bytes     new bytes     delta
BenchmarkSortByWeightAndReverse-4    274573        64             -99.98%
```

Closes #5239
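For reference, the numbers above come from a benchmark of roughly this shape (a sketch, not the verbatim test file; `createSortTestPages` is assumed here to be a fixture helper in `hugolib` that builds n weighted pages):

```go
package hugolib

import "testing"

func BenchmarkSortByWeightAndReverse(b *testing.B) {
	// Assumed fixture helper: builds 300 pages with varying weights.
	p := createSortTestPages(300)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		// After the first iteration both calls are served from the pages
		// cache, so each iteration mostly pays for the cache lookup and
		// the element-wise pagesEqual comparison.
		p = p.ByWeight().Reverse()
	}
}
```

The two `.bench` files fed to `benchcmp` above were presumably produced by redirecting the output of `go test -bench BenchmarkSortByWeightAndReverse` to a file twice: once with the cache disabled (`2.bench`) and once against the current code (`1.bench`).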
Parent: 058cc6c2c3
Commit: ed4f1edbd7

4 changed files with 8 additions and 16 deletions
```diff
@@ -27,7 +27,7 @@ func (entry pageCacheEntry) matches(pageLists []Pages) bool {
 		return false
 	}
 	for i, p := range pageLists {
-		if !fastEqualPages(p, entry.in[i]) {
+		if !pagesEqual(p, entry.in[i]) {
 			return false
 		}
 	}
```
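For context, `matches` is the guard that decides whether a cached result may be reused: every input slice must compare equal, element by element, to the inputs the cached result was computed from. A self-contained schematic of that pattern follows (stand-in types and a simplified `pagesEqual`; the field names are assumptions for illustration, not Hugo's verbatim code):

```go
package main

import "fmt"

// Stand-ins for Hugo's types; names are assumptions for illustration.
type Page struct{ title string }
type Pages []*Page

// pagesEqual, simplified: equal length and identical elements throughout.
func pagesEqual(p1, p2 Pages) bool {
	if len(p1) != len(p2) {
		return false
	}
	for i := range p1 {
		if p1[i] != p2[i] {
			return false
		}
	}
	return true
}

// pageCacheEntry pairs the inputs a result was computed from with the result.
type pageCacheEntry struct {
	in  []Pages
	out Pages
}

// matches reports whether the cached entry was computed from these inputs.
func (entry pageCacheEntry) matches(pageLists []Pages) bool {
	if len(entry.in) != len(pageLists) {
		return false
	}
	for i, p := range pageLists {
		if !pagesEqual(p, entry.in[i]) {
			return false
		}
	}
	return true
}

func main() {
	a := Pages{{title: "a"}, {title: "b"}}
	entry := pageCacheEntry{in: []Pages{a}, out: a}
	fmt.Println(entry.matches([]Pages{a})) // true: cached result is reusable
}
```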
```diff
@@ -103,10 +103,8 @@ func (c *pageCache) getP(key string, apply func(p *Pages), pageLists ...Pages) (
 }
 
-// "fast" as in: we do not compare every element for big slices, but that is
-// good enough for our use cases.
-// TODO(bep) there is a similar method in pagination.go. DRY.
-func fastEqualPages(p1, p2 Pages) bool {
+// pagesEqual returns whether p1 and p2 are equal.
+func pagesEqual(p1, p2 Pages) bool {
 	if p1 == nil && p2 == nil {
 		return true
 	}
```
```diff
@@ -123,13 +121,7 @@ func fastEqualPages(p1, p2 Pages) bool {
 		return true
 	}
 
-	step := 1
-
-	if len(p1) >= 50 {
-		step = len(p1) / 10
-	}
-
-	for i := 0; i < len(p1); i += step {
+	for i := 0; i < len(p1); i++ {
 		if p1[i] != p2[i] {
 			return false
 		}
```
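The deleted sampling is the bug this commit fixes: for slices of 50 or more pages, only every `len(p1)/10`-th element was compared, so two large slices differing at any skipped index were reported equal. A minimal standalone illustration of that false positive, with int slices standing in for `Pages`:

```go
package main

import "fmt"

// fastEqual mirrors the deleted sampling logic on plain int slices.
func fastEqual(a, b []int) bool {
	if len(a) != len(b) {
		return false
	}
	step := 1
	if len(a) >= 50 {
		step = len(a) / 10
	}
	for i := 0; i < len(a); i += step {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}

func main() {
	a := make([]int, 50)
	b := make([]int, 50)
	b[1] = 1 // differs at index 1, which step == 5 never visits
	fmt.Println(fastEqual(a, b)) // prints true: a false positive
}
```

Comparing every element closes that hole, at the cost of the +75.75% ns/op regression noted in the commit message.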
```diff
@@ -57,8 +57,8 @@ func TestPageCache(t *testing.T) {
 	l1.Unlock()
 	p2, c2 := c1.get("k1", nil, p)
 	assert.True(t, c2)
-	assert.True(t, fastEqualPages(p, p2))
-	assert.True(t, fastEqualPages(p, pages))
+	assert.True(t, pagesEqual(p, p2))
+	assert.True(t, pagesEqual(p, pages))
 	assert.NotNil(t, p)
 
 	l2.Lock()
```
```diff
@@ -114,7 +114,7 @@ func TestPageSortReverse(t *testing.T) {
 	assert.Equal(t, 9, p2[0].fuzzyWordCount)
 	assert.Equal(t, 0, p2[9].fuzzyWordCount)
 	// cached
-	assert.True(t, fastEqualPages(p2, p1.Reverse()))
+	assert.True(t, pagesEqual(p2, p1.Reverse()))
 }
 
 func TestPageSortByParam(t *testing.T) {
```
```diff
@@ -154,7 +154,7 @@ func newSearchIndexHandler(cfg related.Config) *relatedDocsHandler {
 // This assumes that a lock has been acquired.
 func (s *relatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
 	for _, ci := range s.postingLists {
-		if fastEqualPages(p, ci.p) {
+		if pagesEqual(p, ci.p) {
 			return ci.postingList
 		}
 	}
```