mirror of
https://github.com/gohugoio/hugo.git
synced 2024-11-21 20:46:30 -05:00
GetJSON/GetCSV: Add retry on invalid content
The retry is triggered when parsing of the content fails. Fixes #1166
This commit is contained in:
parent
be38acdce7
commit
cc5d63c37a
2 changed files with 169 additions and 22 deletions
|
@ -23,6 +23,7 @@ import (
|
|||
"net/url"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/afero"
|
||||
"github.com/spf13/hugo/helpers"
|
||||
|
@ -31,7 +32,11 @@ import (
|
|||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
var remoteURLLock = &remoteLock{m: make(map[string]*sync.Mutex)}
|
||||
var (
|
||||
remoteURLLock = &remoteLock{m: make(map[string]*sync.Mutex)}
|
||||
resSleep = time.Second * 2 // if JSON decoding failed sleep for n seconds before retrying
|
||||
resRetries = 1 // number of retries to load the JSON from URL or local file system
|
||||
)
|
||||
|
||||
type remoteLock struct {
|
||||
sync.RWMutex
|
||||
|
@ -90,13 +95,21 @@ func resWriteCache(id string, c []byte, fs afero.Fs) error {
|
|||
fID := getCacheFileID(id)
|
||||
f, err := fs.Create(fID)
|
||||
if err != nil {
|
||||
return err
|
||||
return errors.New("Error: " + err.Error() + ". Failed to create file: " + fID)
|
||||
}
|
||||
defer f.Close()
|
||||
n, err := f.Write(c)
|
||||
if n == 0 {
|
||||
return errors.New("No bytes written to file: " + fID)
|
||||
}
|
||||
return err
|
||||
if err != nil {
|
||||
return errors.New("Error: " + err.Error() + ". Failed to write to file: " + fID)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func resDeleteCache(id string, fs afero.Fs) error {
|
||||
return fs.Remove(getCacheFileID(id))
|
||||
}
|
||||
|
||||
// resGetRemote loads the content of a remote file. This method is thread safe.
|
||||
|
@ -177,18 +190,25 @@ func resGetResource(url string) ([]byte, error) {
|
|||
// If you provide multiple parts they will be joined together to the final URL.
|
||||
// GetJSON returns nil or parsed JSON to use in a short code.
|
||||
func GetJSON(urlParts ...string) interface{} {
|
||||
var v interface{}
|
||||
url := strings.Join(urlParts, "")
|
||||
|
||||
for i := 0; i <= resRetries; i++ {
|
||||
c, err := resGetResource(url)
|
||||
if err != nil {
|
||||
jww.ERROR.Printf("Failed to get json resource %s with error message %s", url, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
var v interface{}
|
||||
err = json.Unmarshal(c, &v)
|
||||
if err != nil {
|
||||
jww.ERROR.Printf("Cannot read json from resource %s with error message %s", url, err)
|
||||
return nil
|
||||
jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep)
|
||||
time.Sleep(resSleep)
|
||||
resDeleteCache(url, hugofs.SourceFs)
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
@ -212,16 +232,34 @@ func parseCSV(c []byte, sep string) ([][]string, error) {
|
|||
// If you provide multiple parts for the URL they will be joined together to the final URL.
|
||||
// GetCSV returns nil or a slice slice to use in a short code.
|
||||
func GetCSV(sep string, urlParts ...string) [][]string {
|
||||
var d [][]string
|
||||
url := strings.Join(urlParts, "")
|
||||
c, err := resGetResource(url)
|
||||
if err != nil {
|
||||
jww.ERROR.Printf("Failed to get csv resource %s with error message %s", url, err)
|
||||
return nil
|
||||
|
||||
var clearCacheSleep = func(i int, u string) {
|
||||
jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep)
|
||||
time.Sleep(resSleep)
|
||||
resDeleteCache(url, hugofs.SourceFs)
|
||||
}
|
||||
d, err := parseCSV(c, sep)
|
||||
|
||||
for i := 0; i <= resRetries; i++ {
|
||||
c, err := resGetResource(url)
|
||||
|
||||
if err == nil && false == bytes.Contains(c, []byte(sep)) {
|
||||
err = errors.New("Cannot find separator " + sep + " in CSV.")
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
jww.ERROR.Printf("Failed to read csv resource %s with error message %s", url, err)
|
||||
return nil
|
||||
clearCacheSleep(i, url)
|
||||
continue
|
||||
}
|
||||
|
||||
if d, err = parseCSV(c, sep); err != nil {
|
||||
jww.ERROR.Printf("Failed to parse csv file %s with error message %s", url, err)
|
||||
clearCacheSleep(i, url)
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
|
|
@ -15,14 +15,20 @@ package tpl
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/afero"
|
||||
"github.com/spf13/hugo/helpers"
|
||||
"github.com/spf13/hugo/hugofs"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestScpCache(t *testing.T) {
|
||||
|
@ -195,3 +201,106 @@ func TestParseCSV(t *testing.T) {
|
|||
|
||||
}
|
||||
}
|
||||
|
||||
// https://twitter.com/francesc/status/603066617124126720
|
||||
// for the construct: defer testRetryWhenDone().Reset()
|
||||
type wd struct {
|
||||
Reset func()
|
||||
}
|
||||
|
||||
func testRetryWhenDone() wd {
|
||||
cd := viper.GetString("CacheDir")
|
||||
viper.Set("CacheDir", helpers.GetTempDir("", hugofs.SourceFs))
|
||||
var tmpSleep time.Duration
|
||||
tmpSleep, resSleep = resSleep, time.Millisecond
|
||||
return wd{func() {
|
||||
viper.Set("CacheDir", cd)
|
||||
resSleep = tmpSleep
|
||||
}}
|
||||
}
|
||||
|
||||
func TestGetJSONFailParse(t *testing.T) {
|
||||
defer testRetryWhenDone().Reset()
|
||||
|
||||
reqCount := 0
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if reqCount > 0 {
|
||||
w.Header().Add("Content-type", "application/json")
|
||||
fmt.Fprintln(w, `{"gomeetup":["Sydney", "San Francisco", "Stockholm"]}`)
|
||||
} else {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
fmt.Fprintln(w, `ERROR 500`)
|
||||
}
|
||||
reqCount++
|
||||
}))
|
||||
defer ts.Close()
|
||||
url := ts.URL + "/test.json"
|
||||
defer os.Remove(getCacheFileID(url))
|
||||
|
||||
want := map[string]interface{}{"gomeetup": []interface{}{"Sydney", "San Francisco", "Stockholm"}}
|
||||
have := GetJSON(url)
|
||||
assert.NotNil(t, have)
|
||||
if have != nil {
|
||||
assert.EqualValues(t, want, have)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCSVFailParseSep(t *testing.T) {
|
||||
defer testRetryWhenDone().Reset()
|
||||
|
||||
reqCount := 0
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if reqCount > 0 {
|
||||
w.Header().Add("Content-type", "application/json")
|
||||
fmt.Fprintln(w, `gomeetup,city`)
|
||||
fmt.Fprintln(w, `yes,Sydney`)
|
||||
fmt.Fprintln(w, `yes,San Francisco`)
|
||||
fmt.Fprintln(w, `yes,Stockholm`)
|
||||
} else {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
fmt.Fprintln(w, `ERROR 500`)
|
||||
}
|
||||
reqCount++
|
||||
}))
|
||||
defer ts.Close()
|
||||
url := ts.URL + "/test.csv"
|
||||
defer os.Remove(getCacheFileID(url))
|
||||
|
||||
want := [][]string{[]string{"gomeetup", "city"}, []string{"yes", "Sydney"}, []string{"yes", "San Francisco"}, []string{"yes", "Stockholm"}}
|
||||
have := GetCSV(",", url)
|
||||
assert.NotNil(t, have)
|
||||
if have != nil {
|
||||
assert.EqualValues(t, want, have)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCSVFailParse(t *testing.T) {
|
||||
defer testRetryWhenDone().Reset()
|
||||
|
||||
reqCount := 0
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Add("Content-type", "application/json")
|
||||
if reqCount > 0 {
|
||||
fmt.Fprintln(w, `gomeetup,city`)
|
||||
fmt.Fprintln(w, `yes,Sydney`)
|
||||
fmt.Fprintln(w, `yes,San Francisco`)
|
||||
fmt.Fprintln(w, `yes,Stockholm`)
|
||||
} else {
|
||||
fmt.Fprintln(w, `gomeetup,city`)
|
||||
fmt.Fprintln(w, `yes,Sydney,Bondi,`) // wrong number of fields in line
|
||||
fmt.Fprintln(w, `yes,San Francisco`)
|
||||
fmt.Fprintln(w, `yes,Stockholm`)
|
||||
}
|
||||
reqCount++
|
||||
}))
|
||||
defer ts.Close()
|
||||
url := ts.URL + "/test.csv"
|
||||
defer os.Remove(getCacheFileID(url))
|
||||
|
||||
want := [][]string{[]string{"gomeetup", "city"}, []string{"yes", "Sydney"}, []string{"yes", "San Francisco"}, []string{"yes", "Stockholm"}}
|
||||
have := GetCSV(",", url)
|
||||
assert.NotNil(t, have)
|
||||
if have != nil {
|
||||
assert.EqualValues(t, want, have)
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue