// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package tplimpl

import (
	"bytes"
	"encoding/csv"
	"encoding/json"
	"errors"
	"io/ioutil"
	"net/http"
	"net/url"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/spf13/afero"
	"github.com/spf13/hugo/config"
	"github.com/spf13/hugo/helpers"
	jww "github.com/spf13/jwalterweatherman"
)

var (
	remoteURLLock = &remoteLock{m: make(map[string]*sync.Mutex)}
	resSleep      = time.Second * 2 // how long to sleep before retrying a failed fetch or decode
	resRetries    = 1               // number of retries when loading JSON or CSV from a URL or the local file system
	resCacheMu    sync.RWMutex
)

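// remoteLock holds one mutex per URL so that concurrent downloads of the
// same resource are serialized while different URLs can proceed in parallel.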
type remoteLock struct {
	sync.RWMutex
	m map[string]*sync.Mutex
}

// URLLock locks a URL during download.
func (l *remoteLock) URLLock(url string) {
	var (
		lock *sync.Mutex
		ok   bool
	)
	l.Lock()
	if lock, ok = l.m[url]; !ok {
		lock = &sync.Mutex{}
		l.m[url] = lock
	}
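	// Release the map lock before blocking on the per-URL mutex, so that
	// downloads of other URLs are not held up behind this one.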
	l.Unlock()
	lock.Lock()
}

// URLUnlock unlocks a URL when the download has finished. Use only in defer calls.
func (l *remoteLock) URLUnlock(url string) {
	l.RLock()
	defer l.RUnlock()
	if um, ok := l.m[url]; ok {
		um.Unlock()
	}
}

// getCacheFileID returns the cache file path for an ID: the configured
// cacheDir followed by the query-escaped ID.
func getCacheFileID(cfg config.Provider, id string) string {
	return cfg.GetString("cacheDir") + url.QueryEscape(id)
}

// resGetCache returns the content for an ID from the file cache, or an error.
// If the cache is ignored or the file is not found, it returns nil, nil.
func resGetCache(id string, fs afero.Fs, cfg config.Provider, ignoreCache bool) ([]byte, error) {
	resCacheMu.RLock()
	defer resCacheMu.RUnlock()

	if ignoreCache {
		return nil, nil
	}
	fID := getCacheFileID(cfg, id)
	isExists, err := helpers.Exists(fID, fs)
	if err != nil {
		return nil, err
	}
	if !isExists {
		return nil, nil
	}

	return afero.ReadFile(fs, fID)
}

// resWriteCache writes bytes for an ID into the file cache.
func resWriteCache(id string, c []byte, fs afero.Fs, cfg config.Provider, ignoreCache bool) error {
	resCacheMu.Lock()
	defer resCacheMu.Unlock()

	if ignoreCache {
		return nil
	}
	fID := getCacheFileID(cfg, id)
	f, err := fs.Create(fID)
	if err != nil {
		return errors.New("Error: " + err.Error() + ". Failed to create file: " + fID)
	}
	defer f.Close()
	n, err := f.Write(c)
	if n == 0 {
		return errors.New("No bytes written to file: " + fID)
	}
	if err != nil {
		return errors.New("Error: " + err.Error() + ". Failed to write to file: " + fID)
	}
	return nil
}

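// resDeleteCache removes the cached file for an ID, if any.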
func resDeleteCache(id string, fs afero.Fs, cfg config.Provider) error {
	return fs.Remove(getCacheFileID(cfg, id))
}

// resGetRemote loads the content of a remote file. This method is thread safe.
func resGetRemote(url string, fs afero.Fs, cfg config.Provider, hc *http.Client) ([]byte, error) {
	c, err := resGetCache(url, fs, cfg, cfg.GetBool("ignoreCache"))
	if c != nil && err == nil {
		return c, nil
	}
	if err != nil {
		return nil, err
	}

	// block other goroutines while the current URL is being downloaded
	remoteURLLock.URLLock(url)
	defer func() { remoteURLLock.URLUnlock(url) }()

	// re-check the cache: another goroutine may have downloaded and cached
	// the URL while we were waiting for the lock
	c, err = resGetCache(url, fs, cfg, cfg.GetBool("ignoreCache"))
	if c != nil && err == nil {
		return c, nil
	}
	if err != nil {
		return nil, err
	}

	jww.INFO.Printf("Downloading: %s ...", url)
	res, err := hc.Get(url)
	if err != nil {
		return nil, err
	}
	c, err = ioutil.ReadAll(res.Body)
	res.Body.Close()
	if err != nil {
		return nil, err
	}
	err = resWriteCache(url, c, fs, cfg, cfg.GetBool("ignoreCache"))
	if err != nil {
		return nil, err
	}
	jww.INFO.Printf("... and cached to: %s", getCacheFileID(cfg, url))
	return c, nil
}

// resGetLocal loads the content of a local file.
func resGetLocal(url string, fs afero.Fs, cfg config.Provider) ([]byte, error) {
	filename := filepath.Join(cfg.GetString("workingDir"), url)
	if e, err := helpers.Exists(filename, fs); !e {
		return nil, err
	}

	return afero.ReadFile(fs, filename)
}

// resGetResource loads the content of a local or remote file.
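// URLs containing "://" are fetched over HTTP; anything else is read from the
// file system relative to workingDir.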
func (t *templateFuncster) resGetResource(url string) ([]byte, error) {
	if url == "" {
		return nil, nil
	}
	if strings.Contains(url, "://") {
		return resGetRemote(url, t.Fs.Source, t.Cfg, http.DefaultClient)
	}
	return resGetLocal(url, t.Fs.Source, t.Cfg)
}

// getJSON expects one or more parts of a URL to a resource which can either be a local or a remote one.
// If you provide multiple parts, they will be joined together into the final URL.
// getJSON returns nil or the parsed JSON to use in a shortcode.
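//
// Typical shortcode usage (illustrative; the URL here is only an example):
//
//	{{ $data := getJSON "https://example.com/data.json" }}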
func (t *templateFuncster) getJSON(urlParts ...string) interface{} {
	var v interface{}
	url := strings.Join(urlParts, "")

	for i := 0; i <= resRetries; i++ {
		c, err := t.resGetResource(url)
		if err != nil {
			jww.ERROR.Printf("Failed to get json resource %s with error message %s", url, err)
			return nil
		}

		err = json.Unmarshal(c, &v)
		if err != nil {
			jww.ERROR.Printf("Cannot read json from resource %s with error message %s", url, err)
			jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep)
			time.Sleep(resSleep)
			resDeleteCache(url, t.Fs.Source, t.Cfg)
			continue
		}
		break
	}
	return v
}

// parseCSV parses bytes of CSV data into a [][]string, or returns an error.
func parseCSV(c []byte, sep string) ([][]string, error) {
	if len(sep) != 1 {
		return nil, errors.New("Incorrect length of csv separator: " + sep)
	}
	b := bytes.NewReader(c)
	r := csv.NewReader(b)
	rSep := []rune(sep)
	r.Comma = rSep[0]
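	// FieldsPerRecord == 0 makes encoding/csv require every record to have the
	// same number of fields as the first record.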
	r.FieldsPerRecord = 0
	return r.ReadAll()
}

// getCSV expects a data separator and one or more parts of a URL to a resource which
// can either be a local or a remote one.
// The data separator can be a comma, semi-colon, pipe, etc., but only one character.
// If you provide multiple parts for the URL, they will be joined together into the final URL.
// getCSV returns nil or a [][]string to use in a shortcode.
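//
// Typical shortcode usage (illustrative; the separator and URL are only examples):
//
//	{{ $rows := getCSV ";" "https://example.com/data.csv" }}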
func (t *templateFuncster) getCSV(sep string, urlParts ...string) [][]string {
	var d [][]string
	url := strings.Join(urlParts, "")

	var clearCacheSleep = func(i int, u string) {
		jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep)
		time.Sleep(resSleep)
		resDeleteCache(url, t.Fs.Source, t.Cfg)
	}

	for i := 0; i <= resRetries; i++ {
		c, err := t.resGetResource(url)

		if err == nil && !bytes.Contains(c, []byte(sep)) {
			err = errors.New("Cannot find separator " + sep + " in CSV.")
		}

		if err != nil {
			jww.ERROR.Printf("Failed to read csv resource %s with error message %s", url, err)
			clearCacheSleep(i, url)
			continue
		}

		if d, err = parseCSV(c, sep); err != nil {
			jww.ERROR.Printf("Failed to parse csv file %s with error message %s", url, err)
			clearCacheSleep(i, url)
			continue
		}
		break
	}
	return d
}