Mirror of https://github.com/gohugoio/hugo.git
parent 27f5a906a2
commit 1b7ecfc2e1
6 changed files with 28 additions and 25 deletions
@@ -1871,7 +1871,7 @@ func (p *Page) SaveSource() error {
 // TODO(bep) lazy consolidate
 func (p *Page) processShortcodes() error {
 	p.shortcodeState = newShortcodeHandler(p)
-	tmpContent, err := p.shortcodeState.extractShortcodes(string(p.workContent), p.withoutContent())
+	tmpContent, err := p.shortcodeState.extractShortcodes(p.workContent, p.withoutContent())
 	if err != nil {
 		return err
 	}
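Note on the hunk above: p.workContent is already a []byte, so the old call paid for a full copy of the page content just to satisfy a string parameter. Below is a minimal, self-contained sketch of what the call-site change amounts to; the page type here is a simplified stand-in, not the real hugolib.Page.

package main

import "fmt"

// Simplified stand-in for the slice-carrying field on hugolib.Page;
// the real Page type has many more fields.
type page struct {
	workContent []byte
}

// Old shape: the caller had to copy the whole document into a string first.
func extractFromString(s string) int { return len(s) }

// New shape: the []byte the page already holds is passed through unchanged.
func extractFromBytes(b []byte) int { return len(b) }

func main() {
	p := page{workContent: []byte("Some **content** with a {{< figure >}} shortcode.")}

	// Before: string(p.workContent) allocates and copies len(workContent) bytes.
	fmt.Println(extractFromString(string(p.workContent)))

	// After: only the slice header is passed; the content is not copied.
	fmt.Println(extractFromBytes(p.workContent))
}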
@@ -553,9 +553,9 @@ Loop:
 
 			return sc, nil
 		case currItem.IsText():
-			sc.inner = append(sc.inner, currItem.Val)
+			sc.inner = append(sc.inner, currItem.ValStr())
 		case currItem.IsShortcodeName():
-			sc.name = currItem.Val
+			sc.name = currItem.ValStr()
 			// We pick the first template for an arbitrary output format
 			// if more than one. It is "all inner or no inner".
 			tmpl := getShortcodeTemplateForTemplateKey(scKey{}, sc.name, p.s.Tmpl)
@@ -576,11 +576,11 @@ Loop:
 				// named params
 				if sc.params == nil {
 					params := make(map[string]string)
-					params[currItem.Val] = pt.Next().Val
+					params[currItem.ValStr()] = pt.Next().ValStr()
 					sc.params = params
 				} else {
 					if params, ok := sc.params.(map[string]string); ok {
-						params[currItem.Val] = pt.Next().Val
+						params[currItem.ValStr()] = pt.Next().ValStr()
 					} else {
 						return sc, errShortCodeIllegalState
 					}
@@ -590,11 +590,11 @@ Loop:
 				// positional params
 				if sc.params == nil {
 					var params []string
-					params = append(params, currItem.Val)
+					params = append(params, currItem.ValStr())
 					sc.params = params
 				} else {
 					if params, ok := sc.params.([]string); ok {
-						params = append(params, currItem.Val)
+						params = append(params, currItem.ValStr())
 						sc.params = params
 					} else {
 						return sc, errShortCodeIllegalState
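The two hunks above follow the same pattern: sc.params holds either a map[string]string (named params) or a []string (positional params) behind an interface-typed field, and currItem.ValStr() now converts the token's []byte payload to a string only at the point where a string is actually stored. A minimal sketch of the named-params branch, using simplified stand-in types rather than the real shortcode and Item types:

package main

import (
	"errors"
	"fmt"
)

var errShortCodeIllegalState = errors.New("illegal shortcode state")

// Simplified stand-in for pageparser.Item: the payload stays []byte.
type item struct{ val []byte }

func (i item) valStr() string { return string(i.val) }

type shortcode struct {
	// Either map[string]string (named) or []string (positional), never both.
	params interface{}
}

func (sc *shortcode) addNamed(key, val item) error {
	if sc.params == nil {
		sc.params = map[string]string{key.valStr(): val.valStr()}
		return nil
	}
	if m, ok := sc.params.(map[string]string); ok {
		m[key.valStr()] = val.valStr()
		return nil
	}
	return errShortCodeIllegalState // named param added to a positional list
}

func main() {
	sc := &shortcode{}
	_ = sc.addNamed(item{[]byte("class")}, item{[]byte("wide")})
	fmt.Println(sc.params) // map[class:wide]
}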
@@ -613,19 +613,21 @@ Loop:
 	return sc, nil
 }
 
-func (s *shortcodeHandler) extractShortcodes(stringToParse string, p *PageWithoutContent) (string, error) {
+var shortCodeStart = []byte("{{")
+
+func (s *shortcodeHandler) extractShortcodes(input []byte, p *PageWithoutContent) (string, error) {
 
-	startIdx := strings.Index(stringToParse, "{{")
+	startIdx := bytes.Index(input, shortCodeStart)
 
 	// short cut for docs with no shortcodes
 	if startIdx < 0 {
-		return stringToParse, nil
+		return string(input), nil
 	}
 
 	// the parser takes a string;
 	// since this is an internal API, it could make sense to use the mutable []byte all the way, but
 	// it seems that the time isn't really spent in the byte copy operations, and the impl. gets a lot cleaner
-	pt := pageparser.ParseFrom(stringToParse, startIdx)
+	pt := pageparser.ParseFrom(input, startIdx)
 
 	result := bp.GetBuffer()
 	defer bp.PutBuffer(result)
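The package-level shortCodeStart avoids rebuilding the two-byte pattern on every call, and bytes.Index keeps the same early exit the strings.Index version had: documents with no "{{" are returned untouched. A small, self-contained illustration of that fast path only; the real function goes on to hand input and startIdx to the lexer and render through a pooled buffer.

package main

import (
	"bytes"
	"fmt"
)

var shortCodeStart = []byte("{{")

// Sketch of the fast path: return the input unchanged when it cannot
// possibly contain a shortcode.
func extractSketch(input []byte) string {
	if startIdx := bytes.Index(input, shortCodeStart); startIdx < 0 {
		return string(input) // one conversion back to string at the boundary
	}
	// ... the real code passes input and startIdx to pageparser.ParseFrom here ...
	return string(input)
}

func main() {
	fmt.Println(extractSketch([]byte("plain markdown, no shortcodes")))
	fmt.Println(bytes.Index([]byte("a {{< x >}}"), shortCodeStart)) // 2
}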
@@ -642,7 +644,7 @@ Loop:
 
 		switch {
 		case currItem.IsText():
-			result.WriteString(currItem.Val)
+			result.WriteString(currItem.ValStr())
 		case currItem.IsLeftShortcodeDelim():
 			// let extractShortcode handle left delim (will do so recursively)
 			pt.Backup()
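A hedged aside on the hunk above: result comes from bp.GetBuffer(), which this diff does not show, so assuming it behaves like a *bytes.Buffer, result.Write(currItem.Val) would append the raw bytes without the intermediate string that ValStr() allocates; the WriteString form keeps the call site symmetrical with the other ValStr() users. A quick comparison of the two forms on a plain bytes.Buffer:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	val := []byte("text token payload")
	var buf bytes.Buffer

	// As in the diff: []byte -> string -> buffer (one extra allocation and copy).
	buf.WriteString(string(val))

	// Possible alternative when the buffer type allows it: []byte -> buffer directly.
	buf.Write(val)

	fmt.Println(buf.Len()) // 36: both forms appended the same 18 bytes
}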
@@ -424,7 +424,7 @@ func TestExtractShortcodes(t *testing.T) {
 		return fmt.Sprintf("HAHA%s-%dHBHB", shortcodePlaceholderPrefix, counter)
 	}
 
-	content, err := s.extractShortcodes(this.input, p.withoutContent())
+	content, err := s.extractShortcodes([]byte(this.input), p.withoutContent())
 
 	if b, ok := this.expect.(bool); ok && !b {
 		if err == nil {
@@ -21,6 +21,10 @@ type Item struct {
 	Val []byte
 }
 
+func (i Item) ValStr() string {
+	return string(i.Val)
+}
+
 func (i Item) IsText() bool {
 	return i.typ == tText
 }
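Item keeps its payload as Val []byte; ValStr() is a small helper so the call sites that genuinely need a string (map keys, template names, params) convert in one place instead of sprinkling string(...) casts. A stand-alone version of the same idea, with the struct reduced to the one field used here:

package main

import "fmt"

// Simplified stand-in for pageparser.Item.
type Item struct {
	Val []byte
}

// ValStr converts the raw token bytes to a string for the callers that need one.
func (i Item) ValStr() string {
	return string(i.Val)
}

func main() {
	it := Item{Val: []byte("highlight")}
	names := map[string]bool{}
	names[it.ValStr()] = true // a string is required here (map key)
	fmt.Println(names)        // map[highlight:true]
}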
@@ -60,17 +60,6 @@ type pageLexer struct {
 	items []Item
 }
 
-func Parse(s string) *Tokens {
-	return ParseFrom(s, 0)
-}
-
-func ParseFrom(s string, from int) *Tokens {
-	input := []byte(s)
-	lexer := newPageLexer(input, pos(from), lexMainSection) // TODO(bep) 2errors
-	lexer.run()
-	return &Tokens{lexer: lexer}
-}
-
 // note: the input position here is normally 0 (start), but
 // can be set if position of first shortcode is known
 // TODO(bep) 2errors byte
@@ -17,7 +17,15 @@
 // See slides here: http://cuddle.googlecode.com/hg/talk/lex.html
 package pageparser
 
-// The lexical scanning below
+func Parse(input []byte) *Tokens {
+	return ParseFrom(input, 0)
+}
+
+func ParseFrom(input []byte, from int) *Tokens {
+	lexer := newPageLexer(input, pos(from), lexMainSection) // TODO(bep) 2errors
+	lexer.run()
+	return &Tokens{lexer: lexer}
+}
 
 type Tokens struct {
 	lexer *pageLexer
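Taken together with the previous hunk, Parse and ParseFrom now accept []byte directly, so the []byte(s) conversion inside the old ParseFrom disappears and callers that already hold bytes (like extractShortcodes above) pass them straight through. A usage sketch from the caller's side; it does not import the real pageparser package, so the actual ParseFrom call is only indicated in the final print.

package main

import (
	"bytes"
	"fmt"
)

func main() {
	input := []byte(`Intro text {{< highlight go >}}fmt.Println("hi"){{< /highlight >}}`)

	// Mirror of the caller in shortcode.go above: find the first "{{" and,
	// if present, start lexing from that offset.
	startIdx := bytes.Index(input, []byte("{{"))
	if startIdx < 0 {
		fmt.Println("no shortcodes, nothing to parse")
		return
	}
	fmt.Println("would call pageparser.ParseFrom(input, startIdx) with startIdx =", startIdx)
}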