Add support for URLs relative to context root
Setting `RelativeURLs` to `true` will make all relative URLs in the site *really* relative. And will do so with speed.

So, in `/post/myblogpost.html`, `/mycss.css` becomes `../mycss.css`; the same URL in `/index.html` becomes `./mycss.css`, etc. Note that absolute URLs will not be touched (either external resources, or URLs constructed with `BaseURL`).

The speediness is about the same as before:

```
benchmark                   old ns/op    new ns/op    delta
BenchmarkAbsURL             17462        18164        +4.02%
BenchmarkAbsURLSrcset       18842        19632        +4.19%
BenchmarkXMLAbsURLSrcset    18643        19313        +3.59%
BenchmarkXMLAbsURL          9283         9656         +4.02%

benchmark                   old allocs   new allocs   delta
BenchmarkAbsURL             24           28           +16.67%
BenchmarkAbsURLSrcset       29           32           +10.34%
BenchmarkXMLAbsURLSrcset    27           30           +11.11%
BenchmarkXMLAbsURL          12           14           +16.67%

benchmark                   old bytes    new bytes    delta
BenchmarkAbsURL             3154         3404         +7.93%
BenchmarkAbsURLSrcset       2376         2573         +8.29%
BenchmarkXMLAbsURLSrcset    2569         2763         +7.55%
BenchmarkXMLAbsURL          1888         1998         +5.83%
```

Fixes #1104
Fixes #622
Fixes #937
Fixes #157
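As a rough illustration of the prefix computation behind this (a sketch, not part of the diff hunks below; it assumes the `helpers` package introduced here is importable as `github.com/spf13/hugo/helpers`):

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/spf13/hugo/helpers"
)

func main() {
	// The dotted prefix that replaces a leading "/" in relative URLs:
	fmt.Println(helpers.GetDottedRelativePath(filepath.FromSlash("/post/myblogpost.html"))) // ../
	fmt.Println(helpers.GetDottedRelativePath(filepath.FromSlash("/index.html")))           // ./

	// So "/mycss.css" becomes "../mycss.css" in the first page
	// and "./mycss.css" in the second, as described above.
}
```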
parent e522e5f415
commit beaa8b1bca

10 changed files with 181 additions and 110 deletions
```diff
@@ -139,6 +139,7 @@ func InitializeConfig() {
 	viper.SetDefault("Verbose", false)
 	viper.SetDefault("IgnoreCache", false)
 	viper.SetDefault("CanonifyURLs", false)
+	viper.SetDefault("RelativeURLs", false)
 	viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"})
 	viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0))
 	viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1})
```
```diff
@@ -232,6 +232,41 @@ func MakePathRelative(inPath string, possibleDirectories ...string) (string, err
 	return inPath, errors.New("Can't extract relative path, unknown prefix")
 }
 
+// Should be good enough for Hugo.
+var isFileRe = regexp.MustCompile(".*\\..{1,6}$")
+
+// Expects a relative path starting after the content directory.
+func GetDottedRelativePath(inPath string) string {
+	inPath = filepath.Clean(filepath.FromSlash(inPath))
+	if inPath == "." {
+		return "./"
+	}
+	isFile := isFileRe.MatchString(inPath)
+	if !isFile {
+		if !strings.HasSuffix(inPath, FilePathSeparator) {
+			inPath += FilePathSeparator
+		}
+	}
+	if !strings.HasPrefix(inPath, FilePathSeparator) {
+		inPath = FilePathSeparator + inPath
+	}
+	dir, _ := filepath.Split(inPath)
+
+	sectionCount := strings.Count(dir, FilePathSeparator)
+
+	if sectionCount == 0 || dir == FilePathSeparator {
+		return "./"
+	}
+
+	var dottedPath string
+
+	for i := 1; i < sectionCount; i++ {
+		dottedPath += "../"
+	}
+
+	return dottedPath
+}
+
 // Filename takes a path, strips out the extension,
 // and returns the name of the file.
 func Filename(in string) (name string) {
```
```diff
@@ -112,6 +112,45 @@ func TestMakePathRelative(t *testing.T) {
 	}
 }
 
+func TestGetDottedRelativePath(t *testing.T) {
+	// on Windows this will receive both kinds, both country and western ...
+	for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
+		doTestGetDottedRelativePath(f, t)
+	}
+
+}
+
+func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
+	type test struct {
+		input, expected string
+	}
+	data := []test{
+		{"", "./"},
+		{urlFixer("/"), "./"},
+		{urlFixer("post"), "../"},
+		{urlFixer("/post"), "../"},
+		{urlFixer("post/"), "../"},
+		{urlFixer("tags/foo.html"), "../"},
+		{urlFixer("/tags/foo.html"), "../"},
+		{urlFixer("/post/"), "../"},
+		{urlFixer("////post/////"), "../"},
+		{urlFixer("/foo/bar/index.html"), "../../"},
+		{urlFixer("/foo/bar/foo/"), "../../../"},
+		{urlFixer("/foo/bar/foo"), "../../../"},
+		{urlFixer("foo/bar/foo/"), "../../../"},
+		{urlFixer("foo/bar/foo/bar"), "../../../../"},
+		{"404.html", "./"},
+		{"404.xml", "./"},
+		{"/404.html", "./"},
+	}
+	for i, d := range data {
+		output := GetDottedRelativePath(d.input)
+		if d.expected != output {
+			t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+		}
+	}
+}
+
 func TestMakeTitle(t *testing.T) {
 	type test struct {
 		input, expected string
```
```diff
@@ -1397,16 +1397,21 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout
 	err := s.render(name, d, renderBuffer, layouts...)
 
-	absURLInXML, err := transform.AbsURLInXML()
-	if err != nil {
-		return err
-	}
-
 	outBuffer := bp.GetBuffer()
 	defer bp.PutBuffer(outBuffer)
 
-	transformer := transform.NewChain(absURLInXML...)
-	transformer.Apply(outBuffer, renderBuffer)
+	var path []byte
+	if viper.GetBool("RelativeURLs") {
+		path = []byte(helpers.GetDottedRelativePath(dest))
+	} else {
+		s := viper.GetString("BaseURL")
+		if !strings.HasSuffix(s, "/") {
+			s += "/"
+		}
+		path = []byte(s)
+	}
+	transformer := transform.NewChain(transform.AbsURLInXML)
+	transformer.Apply(outBuffer, renderBuffer, path)
 
 	if err == nil {
 		err = s.WriteDestFile(dest, outBuffer)
@@ -1426,20 +1431,32 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou
 	transformLinks := transform.NewEmptyTransforms()
 
-	if viper.GetBool("CanonifyURLs") {
-		absURL, err := transform.AbsURL()
-		if err != nil {
-			return err
-		}
-		transformLinks = append(transformLinks, absURL...)
+	if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") {
+		transformLinks = append(transformLinks, transform.AbsURL)
 	}
 
 	if viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
 		transformLinks = append(transformLinks, transform.LiveReloadInject)
 	}
 
+	var path []byte
+
+	if viper.GetBool("RelativeURLs") {
+		translated, err := s.PageTarget().(target.OptionalTranslator).TranslateRelative(dest)
+		if err != nil {
+			return err
+		}
+		path = []byte(helpers.GetDottedRelativePath(translated))
+	} else if viper.GetBool("CanonifyURLs") {
+		s := viper.GetString("BaseURL")
+		if !strings.HasSuffix(s, "/") {
+			s += "/"
+		}
+		path = []byte(s)
+	}
+
 	transformer := transform.NewChain(transformLinks...)
-	transformer.Apply(outBuffer, renderBuffer)
+	transformer.Apply(outBuffer, renderBuffer, path)
 
 	if err == nil {
 		if err = s.WriteDestPage(dest, outBuffer); err != nil {
```
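Both render paths above boil down to choosing a single byte-slice prefix before the transformer chain runs: a dotted relative path when `RelativeURLs` is on, otherwise the trailing-slash-terminated `BaseURL` used for canonical URLs. A minimal sketch of that selection (the `urlPrefix` helper is hypothetical, added here only for illustration):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/spf13/hugo/helpers"
)

// urlPrefix mirrors the choice made in renderAndWriteXML and
// renderAndWritePage before the transformer chain is applied.
func urlPrefix(relativeURLs bool, baseURL, dest string) []byte {
	if relativeURLs {
		return []byte(helpers.GetDottedRelativePath(dest))
	}
	if !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	return []byte(baseURL)
}

func main() {
	fmt.Println(string(urlPrefix(true, "", "/post/sub/index.html")))    // ../../
	fmt.Println(string(urlPrefix(false, "http://base", "/index.html"))) // http://base/
}
```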
```diff
@@ -16,6 +16,11 @@ type Translator interface {
 	Translate(string) (string, error)
 }
 
+// TODO(bep) consider other ways to solve this.
+type OptionalTranslator interface {
+	TranslateRelative(string) (string, error)
+}
+
 type Output interface {
 	Publisher
 	Translator
```
```diff
@@ -32,10 +32,18 @@ func (pp *PagePub) Publish(path string, r io.Reader) (err error) {
 }
 
 func (pp *PagePub) Translate(src string) (dest string, err error) {
-	if src == helpers.FilePathSeparator {
-		if pp.PublishDir != "" {
-			return filepath.Join(pp.PublishDir, "index.html"), nil
+	dir, err := pp.TranslateRelative(src)
+	if err != nil {
+		return dir, err
 	}
+	if pp.PublishDir != "" {
+		dir = filepath.Join(pp.PublishDir, dir)
+	}
+	return dir, nil
+}
+
+func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
+	if src == helpers.FilePathSeparator {
 		return "index.html", nil
 	}
 
@@ -43,9 +51,6 @@ func (pp *PagePub) Translate(src string) (dest string, err error) {
 	isRoot := dir == ""
 	ext := pp.extension(filepath.Ext(file))
 	name := filename(file)
-	if pp.PublishDir != "" {
-		dir = filepath.Join(pp.PublishDir, dir)
-	}
 
 	if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
 		return filepath.Join(dir, fmt.Sprintf("%s%s", name, ext)), nil
```
```diff
@@ -1,58 +1,11 @@
 package transform
 
-import (
-	"github.com/spf13/viper"
-	"sync"
-)
-
-// to be used in tests; the live site will get its value from Viper.
-var AbsBaseUrl string
-
-var absURLInit sync.Once
-var ar *absURLReplacer
-
-func AbsURL() (trs []link, err error) {
-	initAbsURLReplacer()
-	return absURLFromReplacer(ar)
-}
-
-func absURLFromURL(URL string) (trs []link, err error) {
-	return absURLFromReplacer(newAbsURLReplacer(URL))
-}
-
-func absURLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
-	trs = append(trs, func(ct contentTransformer) {
-		ar.replaceInHTML(ct)
-	})
-	return
-}
-
-func AbsURLInXML() (trs []link, err error) {
-	initAbsURLReplacer()
-	return absURLInXMLFromReplacer(ar)
-}
-
-func absURLInXMLFromURL(URL string) (trs []link, err error) {
-	return absURLInXMLFromReplacer(newAbsURLReplacer(URL))
-}
-
-func absURLInXMLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
-	trs = append(trs, func(ct contentTransformer) {
-		ar.replaceInXML(ct)
-	})
-	return
-}
-
-func initAbsURLReplacer() {
-	absURLInit.Do(func() {
-		var url string
-
-		if AbsBaseUrl != "" {
-			url = AbsBaseUrl
-		} else {
-			url = viper.GetString("BaseURL")
-		}
-
-		ar = newAbsURLReplacer(url)
-	})
-}
+var ar *absURLReplacer = newAbsURLReplacer()
+
+var AbsURL = func(ct contentTransformer) {
+	ar.replaceInHTML(ct)
+}
+
+var AbsURLInXML = func(ct contentTransformer) {
+	ar.replaceInXML(ct)
+}
```
```diff
@@ -3,8 +3,6 @@ package transform
 import (
 	"bytes"
 	"io"
-	"net/url"
-	"strings"
 	"unicode/utf8"
 )
 
@@ -23,6 +21,9 @@ type absurllexer struct {
 	// the target for the new absurlified content
 	w io.Writer
 
+	// path may be set to a "." relative path
+	path []byte
+
 	pos   int // input position
 	start int // item start position
 	width int // width of last element
@@ -56,7 +57,6 @@ var prefixes = []*prefix{
 type absURLMatcher struct {
 	match          []byte
 	quote          []byte
-	replacementURL []byte
 }
 
 // match check rune inside word. Will be != ' '.
@@ -147,7 +147,7 @@ func checkCandidateBase(l *absurllexer) {
 		}
 		l.pos += len(m.match)
 		l.w.Write(m.quote)
-		l.w.Write(m.replacementURL)
+		l.w.Write(l.path)
 		l.start = l.pos
 	}
 }
@@ -188,7 +188,7 @@ func checkCandidateSrcset(l *absurllexer) {
 		l.w.Write([]byte(m.quote))
 		for i, f := range fields {
 			if f[0] == '/' {
-				l.w.Write(m.replacementURL)
+				l.w.Write(l.path)
 				l.w.Write(f[1:])
 
 			} else {
@@ -252,9 +252,11 @@ func (l *absurllexer) replace() {
 }
 
 func doReplace(ct contentTransformer, matchers []absURLMatcher) {
+
 	lexer := &absurllexer{
 		content: ct.Content(),
 		w:       ct,
+		path:    ct.Path(),
 		matchers: matchers}
 
 	lexer.replace()
@@ -265,9 +267,7 @@ type absURLReplacer struct {
 	xmlMatchers  []absURLMatcher
 }
 
-func newAbsURLReplacer(baseURL string) *absURLReplacer {
-	u, _ := url.Parse(baseURL)
-	base := []byte(strings.TrimRight(u.String(), "/") + "/")
+func newAbsURLReplacer() *absURLReplacer {
 
 	// HTML
 	dqHTMLMatch := []byte("\"/")
@@ -285,14 +285,13 @@ func newAbsURLReplacer(baseURL string) *absURLReplacer {
 
 	return &absURLReplacer{
 		htmlMatchers: []absURLMatcher{
-			{dqHTMLMatch, dqHTML, base},
-			{sqHTMLMatch, sqHTML, base},
+			{dqHTMLMatch, dqHTML},
+			{sqHTMLMatch, sqHTML},
 		},
 		xmlMatchers: []absURLMatcher{
-			{dqXMLMatch, dqXML, base},
-			{sqXMLMatch, sqXML, base},
+			{dqXMLMatch, dqXML},
+			{sqXMLMatch, sqXML},
 		}}
-
 }
 
 func (au *absURLReplacer) replaceInHTML(ct contentTransformer) {
```
```diff
@@ -23,6 +23,7 @@ func NewEmptyTransforms() []link {
 // contentTransformer is an interface that enables rotation of pooled buffers
 // in the transformer chain.
 type contentTransformer interface {
+	Path() []byte
 	Content() []byte
 	io.Writer
 }
@@ -30,10 +31,15 @@ type contentTransformer interface {
 // Implements contentTransformer
 // Content is read from the from-buffer and rewritten to to the to-buffer.
 type fromToBuffer struct {
+	path []byte
 	from *bytes.Buffer
 	to   *bytes.Buffer
 }
 
+func (ft fromToBuffer) Path() []byte {
+	return ft.path
+}
+
 func (ft fromToBuffer) Write(p []byte) (n int, err error) {
 	return ft.to.Write(p)
 }
@@ -42,7 +48,7 @@ func (ft fromToBuffer) Content() []byte {
 	return ft.from.Bytes()
 }
 
-func (c *chain) Apply(w io.Writer, r io.Reader) error {
+func (c *chain) Apply(w io.Writer, r io.Reader, p []byte) error {
 
 	b1 := bp.GetBuffer()
 	defer bp.PutBuffer(b1)
@@ -57,7 +63,7 @@ func (c *chain) Apply(w io.Writer, r io.Reader) error {
 	b2 := bp.GetBuffer()
 	defer bp.PutBuffer(b2)
 
-	fb := &fromToBuffer{from: b1, to: b2}
+	fb := &fromToBuffer{path: p, from: b1, to: b2}
 
 	for i, tr := range *c {
 		if i > 0 {
```
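With `Path()` on `contentTransformer`, the per-page prefix simply rides along in `fromToBuffer`, so every transformer in the chain can read it and callers pass it as the new third argument to `Apply`. A usage sketch (assuming the package is importable as `github.com/spf13/hugo/transform`; the expected output mirrors the REL_PATH_VARIATIONS test case below):

```go
package main

import (
	"bytes"
	"fmt"
	"strings"

	"github.com/spf13/hugo/transform"
)

func main() {
	// Chain the HTML URL transformer and hand Apply the dotted prefix
	// for a page two levels below the site root.
	tr := transform.NewChain(transform.AbsURL)

	in := strings.NewReader(`<a href="/img/small.jpg">pic</a>`)
	out := new(bytes.Buffer)

	if err := tr.Apply(out, in, []byte("../../")); err != nil {
		fmt.Println("apply failed:", err)
		return
	}
	fmt.Println(out.String()) // expected: <a href="../../img/small.jpg">pic</a>
}
```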
```diff
@@ -3,6 +3,7 @@ package transform
 import (
 	"bytes"
 	"github.com/spf13/hugo/helpers"
+	"path/filepath"
 	"strings"
 	"testing"
 )
@@ -62,6 +63,11 @@ schemaless: <img srcset='//img.jpg' src='//basic.jpg'>
 schemaless2: <img srcset="//img.jpg" src="//basic.jpg2> POST
 `
 
+const REL_PATH_VARIATIONS = `PRE. a href="/img/small.jpg" POST.`
+const REL_PATH_VARIATIONS_CORRECT = `PRE. a href="../../img/small.jpg" POST.`
+
+const testBaseURL = "http://base/"
+
 var abs_url_bench_tests = []test{
 	{H5_JS_CONTENT_DOUBLE_QUOTE, CORRECT_OUTPUT_SRC_HREF_DQ},
 	{H5_JS_CONTENT_SINGLE_QUOTE, CORRECT_OUTPUT_SRC_HREF_SQ},
@@ -85,11 +91,13 @@ var srcset_xml_tests = []test{
 	{SRCSET_XML_SINGLE_QUOTE, SRCSET_XML_SINGLE_QUOTE_CORRECT},
 	{SRCSET_XML_VARIATIONS, SRCSET_XML_VARIATIONS_CORRECT}}
 
+var relurl_tests = []test{{REL_PATH_VARIATIONS, REL_PATH_VARIATIONS_CORRECT}}
+
 func TestChainZeroTransformers(t *testing.T) {
 	tr := NewChain()
 	in := new(bytes.Buffer)
 	out := new(bytes.Buffer)
-	if err := tr.Apply(in, out); err != nil {
+	if err := tr.Apply(in, out, []byte("")); err != nil {
 		t.Errorf("A zero transformer chain returned an error.")
 	}
 }
@@ -112,7 +120,7 @@ func TestChaingMultipleTransformers(t *testing.T) {
 	tr := NewChain(f1, f2, f3, f4)
 
 	out := new(bytes.Buffer)
-	if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End.")); err != nil {
+	if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End."), []byte("")); err != nil {
 		t.Errorf("Multi transformer chain returned an error: %s", err)
 	}
@@ -124,8 +132,7 @@ func TestChaingMultipleTransformers(t *testing.T) {
 }
 
 func BenchmarkAbsURL(b *testing.B) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -134,8 +141,7 @@ func BenchmarkAbsURL(b *testing.B) {
 }
 
 func BenchmarkAbsURLSrcset(b *testing.B) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -144,8 +150,7 @@ func BenchmarkAbsURLSrcset(b *testing.B) {
 }
 
 func BenchmarkXMLAbsURLSrcset(b *testing.B) {
-	absXMLURL, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absXMLURL...)
+	tr := NewChain(AbsURLInXML)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -154,31 +159,33 @@ func BenchmarkXMLAbsURLSrcset(b *testing.B) {
 }
 
 func TestAbsURL(t *testing.T) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	apply(t.Errorf, tr, abs_url_tests)
 
 }
 
+func TestRelativeURL(t *testing.T) {
+	tr := NewChain(AbsURL)
+
+	applyWithPath(t.Errorf, tr, relurl_tests, helpers.GetDottedRelativePath(filepath.FromSlash("/post/sub/")))
+
+}
+
 func TestAbsURLSrcSet(t *testing.T) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	apply(t.Errorf, tr, srcset_tests)
 }
 
 func TestAbsXMLURLSrcSet(t *testing.T) {
-	absURLInXML, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absURLInXML...)
+	tr := NewChain(AbsURLInXML)
 
 	apply(t.Errorf, tr, srcset_xml_tests)
 
 }
 
 func BenchmarkXMLAbsURL(b *testing.B) {
-	absURLInXML, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absURLInXML...)
+	tr := NewChain(AbsURLInXML)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -187,17 +194,17 @@ func BenchmarkXMLAbsURL(b *testing.B) {
 }
 
 func TestXMLAbsURL(t *testing.T) {
-	absURLInXML, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absURLInXML...)
+	tr := NewChain(AbsURLInXML)
 	apply(t.Errorf, tr, xml_abs_url_tests)
 }
 
 type errorf func(string, ...interface{})
 
-func apply(ef errorf, tr chain, tests []test) {
+func applyWithPath(ef errorf, tr chain, tests []test, path string) {
 	for _, test := range tests {
 		out := new(bytes.Buffer)
-		err := tr.Apply(out, strings.NewReader(test.content))
+		var err error
+		err = tr.Apply(out, strings.NewReader(test.content), []byte(path))
 		if err != nil {
 			ef("Unexpected error: %s", err)
 		}
@@ -207,6 +214,10 @@ func apply(ef errorf, tr chain, tests []test) {
 	}
 }
 
+func apply(ef errorf, tr chain, tests []test) {
+	applyWithPath(ef, tr, tests, testBaseURL)
+}
+
 type test struct {
 	content  string
 	expected string
```