Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-07 20:30:36 -05:00)
parent 6f3716dc22
commit d6c16afde0
3 changed files with 190 additions and 132 deletions
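This commit moves the shortcode lexer out of package hugolib and into the new parser/pageparser package, exporting Tokens, Item and a set of predicate helpers so the shortcode handling in hugolib can consume them. For orientation, here is a minimal sketch (not part of the diff) of driving that exported API; it assumes only identifiers that appear in the hunks below.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/parser/pageparser"
)

func main() {
	// Parse returns a *Tokens positioned at the start of the input.
	pt := pageparser.Parse(`{{< sc1 param1 >}}`)

	for {
		it := pt.Next()
		// IsDone reports an error token or EOF; either way we stop.
		if it.IsDone() {
			break
		}
		// Item implements Stringer, so this prints a readable token.
		fmt.Println(it)
	}
}

The Next/IsDone loop has the same shape as the refactored extractShortcodes further down.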
@@ -22,6 +22,8 @@ import (
 	"regexp"
 	"sort"

+	"github.com/gohugoio/hugo/parser/pageparser"
+
 	_errors "github.com/pkg/errors"

 	"strings"
@@ -478,18 +480,18 @@ var errShortCodeIllegalState = errors.New("Illegal shortcode state")
 // pageTokens state:
 // - before: positioned just before the shortcode start
 // - after: shortcode(s) consumed (plural when they are nested)
-func (s *shortcodeHandler) extractShortcode(ordinal int, pt *pageTokens, p *PageWithoutContent) (*shortcode, error) {
+func (s *shortcodeHandler) extractShortcode(ordinal int, pt *pageparser.Tokens, p *PageWithoutContent) (*shortcode, error) {
 	sc := &shortcode{ordinal: ordinal}
 	var isInner = false

-	var currItem item
 	var cnt = 0
 	var nestedOrdinal = 0

 	// TODO(bep) 2errors revisit after https://github.com/gohugoio/hugo/issues/5324
-	msgf := func(i item, format string, args ...interface{}) string {
+	msgf := func(i pageparser.Item, format string, args ...interface{}) string {
 		format = format + ":%d:"
-		c1 := strings.Count(pt.lexer.input[:i.pos], "\n") + 1
+		// TODO(bep) 2errors
+		c1 := 32 // strings.Count(pt.lexer.input[:i.pos], "\n") + 1
 		c2 := bytes.Count(p.frontmatter, []byte{'\n'})
 		args = append(args, c1+c2)
 		return fmt.Sprintf(format, args...)
@@ -498,18 +500,17 @@ func (s *shortcodeHandler) extractShortcode(ordinal int, pt *pageTokens, p *Page

 Loop:
 	for {
-		currItem = pt.next()
-
-		switch currItem.typ {
-		case tLeftDelimScWithMarkup, tLeftDelimScNoMarkup:
-			next := pt.peek()
-			if next.typ == tScClose {
+		currItem := pt.Next()
+		switch {
+		case currItem.IsLeftShortcodeDelim():
+			next := pt.Peek()
+			if next.IsShortcodeClose() {
 				continue
 			}

 			if cnt > 0 {
 				// nested shortcode; append it to inner content
-				pt.backup3(currItem, next)
+				pt.Backup3(currItem, next)
 				nested, err := s.extractShortcode(nestedOrdinal, pt, p)
 				nestedOrdinal++
 				if nested.name != "" {
@@ -522,39 +523,39 @@ Loop:
 				}

 			} else {
-				sc.doMarkup = currItem.typ == tLeftDelimScWithMarkup
+				sc.doMarkup = currItem.IsShortcodeMarkupDelimiter()
 			}

 			cnt++

-		case tRightDelimScWithMarkup, tRightDelimScNoMarkup:
+		case currItem.IsRightShortcodeDelim():
 			// we trust the template on this:
 			// if there's no inner, we're done
 			if !isInner {
 				return sc, nil
 			}

-		case tScClose:
-			next := pt.peek()
+		case currItem.IsShortcodeClose():
+			next := pt.Peek()
 			if !isInner {
-				if next.typ == tError {
+				if next.IsError() {
 					// return that error, more specific
 					continue
 				}
-				return sc, errors.New(msgf(next, "shortcode %q has no .Inner, yet a closing tag was provided", next.val))
+				return sc, errors.New(msgf(next, "shortcode %q has no .Inner, yet a closing tag was provided", next.Val))
 			}
-			if next.typ == tRightDelimScWithMarkup || next.typ == tRightDelimScNoMarkup {
+			if next.IsRightShortcodeDelim() {
 				// self-closing
-				pt.consume(1)
+				pt.Consume(1)
 			} else {
-				pt.consume(2)
+				pt.Consume(2)
 			}

 			return sc, nil
-		case tText:
-			sc.inner = append(sc.inner, currItem.val)
-		case tScName:
-			sc.name = currItem.val
+		case currItem.IsText():
+			sc.inner = append(sc.inner, currItem.Val)
+		case currItem.IsShortcodeName():
+			sc.name = currItem.Val
 			// We pick the first template for an arbitrary output format
 			// if more than one. It is "all inner or no inner".
 			tmpl := getShortcodeTemplateForTemplateKey(scKey{}, sc.name, p.s.Tmpl)
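The Consume(1)/Consume(2) branch above distinguishes a self-closing shortcode, where the close marker is followed directly by the right delimiter, from an explicit closing tag, where the shortcode name sits in between. A small sketch (not part of the commit, using only the API it adds) makes the two token shapes visible:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/parser/pageparser"
)

// dump prints the raw value of every token up to EOF or error.
func dump(input string) {
	pt := pageparser.Parse(input)
	for {
		it := pt.Next()
		if it.IsDone() {
			break
		}
		fmt.Printf("%q ", it.Val)
	}
	fmt.Println()
}

func main() {
	// Self-closing: "/" is followed directly by the right delimiter,
	// so only one more token needs to be consumed.
	dump(`{{< sc1 />}}`)

	// Closing tag: "/" is followed by the shortcode name and then the
	// right delimiter, hence the Consume(2) branch.
	dump(`{{< sc1 >}}inner{{< /sc1 >}}`)
}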
@@ -568,18 +569,18 @@ Loop:
 				return sc, _errors.Wrap(err, msgf(currItem, "failed to handle template for shortcode %q", sc.name))
 			}

-		case tScParam:
-			if !pt.isValueNext() {
+		case currItem.IsShortcodeParam():
+			if !pt.IsValueNext() {
 				continue
-			} else if pt.peek().typ == tScParamVal {
+			} else if pt.Peek().IsShortcodeParamVal() {
 				// named params
 				if sc.params == nil {
 					params := make(map[string]string)
-					params[currItem.val] = pt.next().val
+					params[currItem.Val] = pt.Next().Val
 					sc.params = params
 				} else {
 					if params, ok := sc.params.(map[string]string); ok {
-						params[currItem.val] = pt.next().val
+						params[currItem.Val] = pt.Next().Val
 					} else {
 						return sc, errShortCodeIllegalState
 					}
@@ -589,11 +590,11 @@ Loop:
 				// positional params
 				if sc.params == nil {
 					var params []string
-					params = append(params, currItem.val)
+					params = append(params, currItem.Val)
 					sc.params = params
 				} else {
 					if params, ok := sc.params.([]string); ok {
-						params = append(params, currItem.val)
+						params = append(params, currItem.Val)
 						sc.params = params
 					} else {
 						return sc, errShortCodeIllegalState
@@ -602,9 +603,9 @@ Loop:
 				}
 			}

-		case tError, tEOF:
+		case currItem.IsDone():
 			// handled by caller
-			pt.backup()
+			pt.Backup()
 			break Loop

 		}
@@ -624,7 +625,7 @@ func (s *shortcodeHandler) extractShortcodes(stringToParse string, p *PageWithou
 	// the parser takes a string;
 	// since this is an internal API, it could make sense to use the mutable []byte all the way, but
 	// it seems that the time isn't really spent in the byte copy operations, and the impl. gets a lot cleaner
-	pt := &pageTokens{lexer: newShortcodeLexer("parse-page", stringToParse, pos(startIdx))}
+	pt := pageparser.ParseFrom(stringToParse, startIdx)

 	result := bp.GetBuffer()
 	defer bp.PutBuffer(result)
@@ -632,20 +633,19 @@ func (s *shortcodeHandler) extractShortcodes(stringToParse string, p *PageWithou

 	// the parser is guaranteed to return items in proper order or fail, so …
 	// … it's safe to keep some "global" state
-	var currItem item
 	var currShortcode shortcode
 	var ordinal int

 Loop:
 	for {
-		currItem = pt.next()
+		currItem := pt.Next()

-		switch currItem.typ {
-		case tText:
-			result.WriteString(currItem.val)
-		case tLeftDelimScWithMarkup, tLeftDelimScNoMarkup:
+		switch {
+		case currItem.IsText():
+			result.WriteString(currItem.Val)
+		case currItem.IsLeftShortcodeDelim():
 			// let extractShortcode handle left delim (will do so recursively)
-			pt.backup()
+			pt.Backup()

 			currShortcode, err := s.extractShortcode(ordinal, pt, p)

@@ -665,11 +665,11 @@ Loop:
 			result.WriteString(placeHolder)
 			ordinal++
 			s.shortcodes.Add(placeHolder, currShortcode)
-		case tEOF:
+		case currItem.IsEOF():
 			break Loop
-		case tError:
+		case currItem.IsError():
 			err := fmt.Errorf("%s:shortcode:%d: %s",
-				p.pathOrTitle(), (p.lineNumRawContentStart() + pt.lexer.lineNum() - 1), currItem)
+				p.pathOrTitle(), (p.lineNumRawContentStart() + pt.LineNumber() - 1), currItem)
 			currShortcode.err = err
 			return result.String(), err
 		}
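Both switches in hugolib now branch on the exported predicate methods instead of the package-private itemType constants. A standalone sketch of the same pattern, assuming only identifiers introduced by this commit:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/parser/pageparser"
)

// kind maps an Item to a label using the predicate methods that replace the
// old comparisons against unexported itemType constants.
func kind(it pageparser.Item) string {
	switch {
	case it.IsLeftShortcodeDelim():
		return "left-delim"
	case it.IsRightShortcodeDelim():
		return "right-delim"
	case it.IsShortcodeClose():
		return "close"
	case it.IsShortcodeName():
		return "name"
	case it.IsShortcodeParam():
		return "param"
	case it.IsShortcodeParamVal():
		return "param-value"
	case it.IsText():
		return "text"
	default:
		return "other"
	}
}

func main() {
	pt := pageparser.Parse(`{{< sc1 param1="Hello World" >}}text{{< /sc1 >}}`)
	for {
		it := pt.Next()
		if it.IsDone() {
			break
		}
		fmt.Printf("%-12s %q\n", kind(it), it.Val)
	}
}

The remaining hunks below belong to the pageparser package itself: first the lexer source moved out of hugolib, then its test file.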
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2018 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package hugolib
+package pageparser

 import (
 	"fmt"
@@ -26,13 +26,13 @@ import (

 // parsing

-type pageTokens struct {
+type Tokens struct {
 	lexer     *pagelexer
-	token     [3]item // 3-item look-ahead is what we currently need
+	token     [3]Item // 3-item look-ahead is what we currently need
 	peekCount int
 }

-func (t *pageTokens) next() item {
+func (t *Tokens) Next() Item {
 	if t.peekCount > 0 {
 		t.peekCount--
 	} else {
@@ -42,32 +42,32 @@ func (t *pageTokens) next() item {
 }

 // backs up one token.
-func (t *pageTokens) backup() {
+func (t *Tokens) Backup() {
 	t.peekCount++
 }

 // backs up two tokens.
-func (t *pageTokens) backup2(t1 item) {
+func (t *Tokens) Backup2(t1 Item) {
 	t.token[1] = t1
 	t.peekCount = 2
 }

 // backs up three tokens.
-func (t *pageTokens) backup3(t2, t1 item) {
+func (t *Tokens) Backup3(t2, t1 Item) {
 	t.token[1] = t1
 	t.token[2] = t2
 	t.peekCount = 3
 }

 // check for non-error and non-EOF types coming next
-func (t *pageTokens) isValueNext() bool {
-	i := t.peek()
+func (t *Tokens) IsValueNext() bool {
+	i := t.Peek()
 	return i.typ != tError && i.typ != tEOF
 }

 // look at, but do not consume, the next item
 // repeated, sequential calls will return the same item
-func (t *pageTokens) peek() item {
+func (t *Tokens) Peek() Item {
 	if t.peekCount > 0 {
 		return t.token[t.peekCount-1]
 	}
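The exported Peek/Next/Backup trio keeps the three-token look-ahead semantics of the old unexported methods: Peek never consumes, Next does, and Backup pushes the last token back so a caller such as extractShortcodes can hand a delimiter on to extractShortcode. A small sketch (not from the repository) of that contract:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/parser/pageparser"
)

func main() {
	pt := pageparser.Parse(`{{< sc1 >}}`)

	// Peek looks at the next token without consuming it; repeated calls
	// return the same token.
	peeked := pt.Peek()

	// Next consumes that token.
	consumed := pt.Next()
	fmt.Println(peeked.Val == consumed.Val) // true

	// Backup un-reads the token just consumed, so it comes out again.
	pt.Backup()
	fmt.Println(pt.Next().Val == consumed.Val) // true again
}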
@@ -76,40 +76,90 @@ func (t *pageTokens) peek() item {
 	return t.token[0]
 }

-// convencience method to consume the next n tokens, but back off Errors and EOF
-func (t *pageTokens) consume(cnt int) {
+// Consume is a convencience method to consume the next n tokens,
+// but back off Errors and EOF.
+func (t *Tokens) Consume(cnt int) {
 	for i := 0; i < cnt; i++ {
-		token := t.next()
+		token := t.Next()
 		if token.typ == tError || token.typ == tEOF {
-			t.backup()
+			t.Backup()
 			break
 		}
 	}
 }

+// LineNumber returns the current line number. Used for logging.
+func (t *Tokens) LineNumber() int {
+	return t.lexer.lineNum()
+}
+
 // lexical scanning

 // position (in bytes)
 type pos int

-type item struct {
+type Item struct {
 	typ itemType
 	pos pos
-	val string
+	Val string
 }

-func (i item) String() string {
+func (i Item) IsText() bool {
+	return i.typ == tText
+}
+
+func (i Item) IsShortcodeName() bool {
+	return i.typ == tScName
+}
+
+func (i Item) IsLeftShortcodeDelim() bool {
+	return i.typ == tLeftDelimScWithMarkup || i.typ == tLeftDelimScNoMarkup
+}
+
+func (i Item) IsRightShortcodeDelim() bool {
+	return i.typ == tRightDelimScWithMarkup || i.typ == tRightDelimScNoMarkup
+}
+
+func (i Item) IsShortcodeClose() bool {
+	return i.typ == tScClose
+}
+
+func (i Item) IsShortcodeParam() bool {
+	return i.typ == tScParam
+}
+
+func (i Item) IsShortcodeParamVal() bool {
+	return i.typ == tScParamVal
+}
+
+func (i Item) IsShortcodeMarkupDelimiter() bool {
+	return i.typ == tLeftDelimScWithMarkup || i.typ == tRightDelimScWithMarkup
+}
+
+func (i Item) IsDone() bool {
+	return i.typ == tError || i.typ == tEOF
+}
+
+func (i Item) IsEOF() bool {
+	return i.typ == tEOF
+}
+
+func (i Item) IsError() bool {
+	return i.typ == tError
+}
+
+func (i Item) String() string {
 	switch {
 	case i.typ == tEOF:
 		return "EOF"
 	case i.typ == tError:
-		return i.val
+		return i.Val
 	case i.typ > tKeywordMarker:
-		return fmt.Sprintf("<%s>", i.val)
-	case len(i.val) > 20:
-		return fmt.Sprintf("%.20q...", i.val)
+		return fmt.Sprintf("<%s>", i.Val)
+	case len(i.Val) > 20:
+		return fmt.Sprintf("%.20q...", i.Val)
 	}
-	return fmt.Sprintf("[%s]", i.val)
+	return fmt.Sprintf("[%s]", i.Val)
 }

 type itemType int
@@ -159,7 +209,15 @@ type pagelexer struct {
 	openShortcodes map[string]bool // set of shortcodes in open state

 	// items delivered to client
-	items []item
+	items []Item
 }

+func Parse(s string) *Tokens {
+	return ParseFrom(s, 0)
+}
+
+func ParseFrom(s string, from int) *Tokens {
+	return &Tokens{lexer: newShortcodeLexer("default", s, pos(from))}
+}
+
 // note: the input position here is normally 0 (start), but
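Parse and ParseFrom are the only constructors the new package exposes: Parse(s) is simply ParseFrom(s, 0), and hugolib's extractShortcodes above calls ParseFrom with the offset at which the raw content starts. A sketch (not part of the commit) of the equivalence:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/parser/pageparser"
)

func main() {
	src := `{{< sc1 >}}`

	// Parse(src) and ParseFrom(src, 0) start lexing at the same position,
	// so they yield the same first token.
	a := pageparser.Parse(src)
	b := pageparser.ParseFrom(src, 0)

	fmt.Println(a.Next().Val == b.Next().Val) // true
}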
@@ -172,7 +230,7 @@ func newShortcodeLexer(name, input string, inputPosition pos) *pagelexer {
 		currRightDelimItem: tRightDelimScNoMarkup,
 		pos:                inputPosition,
 		openShortcodes:     make(map[string]bool),
-		items:              make([]item, 0, 5),
+		items:              make([]Item, 0, 5),
 	}
 	lexer.runShortcodeLexer()
 	return lexer
@@ -225,7 +283,7 @@ func (l *pagelexer) backup() {

 // sends an item back to the client.
 func (l *pagelexer) emit(t itemType) {
-	l.items = append(l.items, item{t, l.start, l.input[l.start:l.pos]})
+	l.items = append(l.items, Item{t, l.start, l.input[l.start:l.pos]})
 	l.start = l.pos
 }

@@ -237,7 +295,7 @@ func (l *pagelexer) ignoreEscapesAndEmit(t itemType) {
 		}
 		return r
 	}, l.input[l.start:l.pos])
-	l.items = append(l.items, item{t, l.start, val})
+	l.items = append(l.items, Item{t, l.start, val})
 	l.start = l.pos
 }

@@ -258,12 +316,12 @@ func (l *pagelexer) lineNum() int {

 // nil terminates the parser
 func (l *pagelexer) errorf(format string, args ...interface{}) stateFunc {
-	l.items = append(l.items, item{tError, l.start, fmt.Sprintf(format, args...)})
+	l.items = append(l.items, Item{tError, l.start, fmt.Sprintf(format, args...)})
 	return nil
 }

 // consumes and returns the next item
-func (l *pagelexer) nextItem() item {
+func (l *pagelexer) nextItem() Item {
 	item := l.items[0]
 	l.items = l.items[1:]
 	l.lastPos = item.pos
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2018 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package hugolib
+package pageparser

 import (
 	"testing"
@@ -20,39 +20,39 @@ import (
 type shortCodeLexerTest struct {
 	name  string
 	input string
-	items []item
+	items []Item
 }

 var (
-	tstEOF       = item{tEOF, 0, ""}
-	tstLeftNoMD  = item{tLeftDelimScNoMarkup, 0, "{{<"}
-	tstRightNoMD = item{tRightDelimScNoMarkup, 0, ">}}"}
-	tstLeftMD    = item{tLeftDelimScWithMarkup, 0, "{{%"}
-	tstRightMD   = item{tRightDelimScWithMarkup, 0, "%}}"}
-	tstSCClose   = item{tScClose, 0, "/"}
-	tstSC1       = item{tScName, 0, "sc1"}
-	tstSC2       = item{tScName, 0, "sc2"}
-	tstSC3       = item{tScName, 0, "sc3"}
-	tstSCSlash   = item{tScName, 0, "sc/sub"}
-	tstParam1    = item{tScParam, 0, "param1"}
-	tstParam2    = item{tScParam, 0, "param2"}
-	tstVal       = item{tScParamVal, 0, "Hello World"}
+	tstEOF       = Item{tEOF, 0, ""}
+	tstLeftNoMD  = Item{tLeftDelimScNoMarkup, 0, "{{<"}
+	tstRightNoMD = Item{tRightDelimScNoMarkup, 0, ">}}"}
+	tstLeftMD    = Item{tLeftDelimScWithMarkup, 0, "{{%"}
+	tstRightMD   = Item{tRightDelimScWithMarkup, 0, "%}}"}
+	tstSCClose   = Item{tScClose, 0, "/"}
+	tstSC1       = Item{tScName, 0, "sc1"}
+	tstSC2       = Item{tScName, 0, "sc2"}
+	tstSC3       = Item{tScName, 0, "sc3"}
+	tstSCSlash   = Item{tScName, 0, "sc/sub"}
+	tstParam1    = Item{tScParam, 0, "param1"}
+	tstParam2    = Item{tScParam, 0, "param2"}
+	tstVal       = Item{tScParamVal, 0, "Hello World"}
 )

 var shortCodeLexerTests = []shortCodeLexerTest{
-	{"empty", "", []item{tstEOF}},
-	{"spaces", " \t\n", []item{{tText, 0, " \t\n"}, tstEOF}},
-	{"text", `to be or not`, []item{{tText, 0, "to be or not"}, tstEOF}},
-	{"no markup", `{{< sc1 >}}`, []item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
-	{"with EOL", "{{< sc1 \n >}}", []item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
+	{"empty", "", []Item{tstEOF}},
+	{"spaces", " \t\n", []Item{{tText, 0, " \t\n"}, tstEOF}},
+	{"text", `to be or not`, []Item{{tText, 0, "to be or not"}, tstEOF}},
+	{"no markup", `{{< sc1 >}}`, []Item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
+	{"with EOL", "{{< sc1 \n >}}", []Item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},

-	{"forward slash inside name", `{{< sc/sub >}}`, []item{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}},
+	{"forward slash inside name", `{{< sc/sub >}}`, []Item{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}},

-	{"simple with markup", `{{% sc1 %}}`, []item{tstLeftMD, tstSC1, tstRightMD, tstEOF}},
-	{"with spaces", `{{< sc1 >}}`, []item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
-	{"mismatched rightDelim", `{{< sc1 %}}`, []item{tstLeftNoMD, tstSC1,
+	{"simple with markup", `{{% sc1 %}}`, []Item{tstLeftMD, tstSC1, tstRightMD, tstEOF}},
+	{"with spaces", `{{< sc1 >}}`, []Item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
+	{"mismatched rightDelim", `{{< sc1 %}}`, []Item{tstLeftNoMD, tstSC1,
 		{tError, 0, "unrecognized character in shortcode action: U+0025 '%'. Note: Parameters with non-alphanumeric args must be quoted"}}},
-	{"inner, markup", `{{% sc1 %}} inner {{% /sc1 %}}`, []item{
+	{"inner, markup", `{{% sc1 %}} inner {{% /sc1 %}}`, []Item{
 		tstLeftMD,
 		tstSC1,
 		tstRightMD,
@@ -63,44 +63,44 @@ var shortCodeLexerTests = []shortCodeLexerTest{
 		tstRightMD,
 		tstEOF,
 	}},
-	{"close, but no open", `{{< /sc1 >}}`, []item{
+	{"close, but no open", `{{< /sc1 >}}`, []Item{
 		tstLeftNoMD, {tError, 0, "got closing shortcode, but none is open"}}},
-	{"close wrong", `{{< sc1 >}}{{< /another >}}`, []item{
+	{"close wrong", `{{< sc1 >}}{{< /another >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose,
 		{tError, 0, "closing tag for shortcode 'another' does not match start tag"}}},
-	{"close, but no open, more", `{{< sc1 >}}{{< /sc1 >}}{{< /another >}}`, []item{
+	{"close, but no open, more", `{{< sc1 >}}{{< /sc1 >}}{{< /another >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose,
 		{tError, 0, "closing tag for shortcode 'another' does not match start tag"}}},
-	{"close with extra keyword", `{{< sc1 >}}{{< /sc1 keyword>}}`, []item{
+	{"close with extra keyword", `{{< sc1 >}}{{< /sc1 keyword>}}`, []Item{
 		tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1,
 		{tError, 0, "unclosed shortcode"}}},
-	{"Youtube id", `{{< sc1 -ziL-Q_456igdO-4 >}}`, []item{
+	{"Youtube id", `{{< sc1 -ziL-Q_456igdO-4 >}}`, []Item{
 		tstLeftNoMD, tstSC1, {tScParam, 0, "-ziL-Q_456igdO-4"}, tstRightNoMD, tstEOF}},
-	{"non-alphanumerics param quoted", `{{< sc1 "-ziL-.%QigdO-4" >}}`, []item{
+	{"non-alphanumerics param quoted", `{{< sc1 "-ziL-.%QigdO-4" >}}`, []Item{
 		tstLeftNoMD, tstSC1, {tScParam, 0, "-ziL-.%QigdO-4"}, tstRightNoMD, tstEOF}},

-	{"two params", `{{< sc1 param1 param2 >}}`, []item{
+	{"two params", `{{< sc1 param1 param2 >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstParam2, tstRightNoMD, tstEOF}},
 	// issue #934
-	{"self-closing", `{{< sc1 />}}`, []item{
+	{"self-closing", `{{< sc1 />}}`, []Item{
 		tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF}},
 	// Issue 2498
-	{"multiple self-closing", `{{< sc1 />}}{{< sc1 />}}`, []item{
+	{"multiple self-closing", `{{< sc1 />}}{{< sc1 />}}`, []Item{
 		tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD,
 		tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF}},
-	{"self-closing with param", `{{< sc1 param1 />}}`, []item{
+	{"self-closing with param", `{{< sc1 param1 />}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF}},
-	{"multiple self-closing with param", `{{< sc1 param1 />}}{{< sc1 param1 />}}`, []item{
+	{"multiple self-closing with param", `{{< sc1 param1 />}}{{< sc1 param1 />}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD,
 		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF}},
-	{"multiple different self-closing with param", `{{< sc1 param1 />}}{{< sc2 param1 />}}`, []item{
+	{"multiple different self-closing with param", `{{< sc1 param1 />}}{{< sc2 param1 />}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD,
 		tstLeftNoMD, tstSC2, tstParam1, tstSCClose, tstRightNoMD, tstEOF}},
-	{"nested simple", `{{< sc1 >}}{{< sc2 >}}{{< /sc1 >}}`, []item{
+	{"nested simple", `{{< sc1 >}}{{< sc2 >}}{{< /sc1 >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstRightNoMD,
 		tstLeftNoMD, tstSC2, tstRightNoMD,
 		tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstEOF}},
-	{"nested complex", `{{< sc1 >}}ab{{% sc2 param1 %}}cd{{< sc3 >}}ef{{< /sc3 >}}gh{{% /sc2 %}}ij{{< /sc1 >}}kl`, []item{
+	{"nested complex", `{{< sc1 >}}ab{{% sc2 param1 %}}cd{{< sc3 >}}ef{{< /sc3 >}}gh{{% /sc2 %}}ij{{< /sc1 >}}kl`, []Item{
 		tstLeftNoMD, tstSC1, tstRightNoMD,
 		{tText, 0, "ab"},
 		tstLeftMD, tstSC2, tstParam1, tstRightMD,
@@ -115,44 +115,44 @@ var shortCodeLexerTests = []shortCodeLexerTest{
 		{tText, 0, "kl"}, tstEOF,
 	}},

-	{"two quoted params", `{{< sc1 "param nr. 1" "param nr. 2" >}}`, []item{
+	{"two quoted params", `{{< sc1 "param nr. 1" "param nr. 2" >}}`, []Item{
 		tstLeftNoMD, tstSC1, {tScParam, 0, "param nr. 1"}, {tScParam, 0, "param nr. 2"}, tstRightNoMD, tstEOF}},
-	{"two named params", `{{< sc1 param1="Hello World" param2="p2Val">}}`, []item{
+	{"two named params", `{{< sc1 param1="Hello World" param2="p2Val">}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstVal, tstParam2, {tScParamVal, 0, "p2Val"}, tstRightNoMD, tstEOF}},
-	{"escaped quotes", `{{< sc1 param1=\"Hello World\" >}}`, []item{
+	{"escaped quotes", `{{< sc1 param1=\"Hello World\" >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstVal, tstRightNoMD, tstEOF}},
-	{"escaped quotes, positional param", `{{< sc1 \"param1\" >}}`, []item{
+	{"escaped quotes, positional param", `{{< sc1 \"param1\" >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstRightNoMD, tstEOF}},
-	{"escaped quotes inside escaped quotes", `{{< sc1 param1=\"Hello \"escaped\" World\" >}}`, []item{
+	{"escaped quotes inside escaped quotes", `{{< sc1 param1=\"Hello \"escaped\" World\" >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1,
 		{tScParamVal, 0, `Hello `}, {tError, 0, `got positional parameter 'escaped'. Cannot mix named and positional parameters`}}},
 	{"escaped quotes inside nonescaped quotes",
-		`{{< sc1 param1="Hello \"escaped\" World" >}}`, []item{
+		`{{< sc1 param1="Hello \"escaped\" World" >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, {tScParamVal, 0, `Hello "escaped" World`}, tstRightNoMD, tstEOF}},
 	{"escaped quotes inside nonescaped quotes in positional param",
-		`{{< sc1 "Hello \"escaped\" World" >}}`, []item{
+		`{{< sc1 "Hello \"escaped\" World" >}}`, []Item{
 		tstLeftNoMD, tstSC1, {tScParam, 0, `Hello "escaped" World`}, tstRightNoMD, tstEOF}},
-	{"unterminated quote", `{{< sc1 param2="Hello World>}}`, []item{
+	{"unterminated quote", `{{< sc1 param2="Hello World>}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam2, {tError, 0, "unterminated quoted string in shortcode parameter-argument: 'Hello World>}}'"}}},
-	{"one named param, one not", `{{< sc1 param1="Hello World" p2 >}}`, []item{
+	{"one named param, one not", `{{< sc1 param1="Hello World" p2 >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstVal,
 		{tError, 0, "got positional parameter 'p2'. Cannot mix named and positional parameters"}}},
-	{"one named param, one quoted positional param", `{{< sc1 param1="Hello World" "And Universe" >}}`, []item{
+	{"one named param, one quoted positional param", `{{< sc1 param1="Hello World" "And Universe" >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1, tstVal,
 		{tError, 0, "got quoted positional parameter. Cannot mix named and positional parameters"}}},
-	{"one quoted positional param, one named param", `{{< sc1 "param1" param2="And Universe" >}}`, []item{
+	{"one quoted positional param, one named param", `{{< sc1 "param1" param2="And Universe" >}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1,
 		{tError, 0, "got named parameter 'param2'. Cannot mix named and positional parameters"}}},
-	{"ono positional param, one not", `{{< sc1 param1 param2="Hello World">}}`, []item{
+	{"ono positional param, one not", `{{< sc1 param1 param2="Hello World">}}`, []Item{
 		tstLeftNoMD, tstSC1, tstParam1,
 		{tError, 0, "got named parameter 'param2'. Cannot mix named and positional parameters"}}},
-	{"commented out", `{{</* sc1 */>}}`, []item{
+	{"commented out", `{{</* sc1 */>}}`, []Item{
 		{tText, 0, "{{<"}, {tText, 0, " sc1 "}, {tText, 0, ">}}"}, tstEOF}},
-	{"commented out, with asterisk inside", `{{</* sc1 "**/*.pdf" */>}}`, []item{
+	{"commented out, with asterisk inside", `{{</* sc1 "**/*.pdf" */>}}`, []Item{
 		{tText, 0, "{{<"}, {tText, 0, " sc1 \"**/*.pdf\" "}, {tText, 0, ">}}"}, tstEOF}},
-	{"commented out, missing close", `{{</* sc1 >}}`, []item{
+	{"commented out, missing close", `{{</* sc1 >}}`, []Item{
 		{tError, 0, "comment must be closed"}}},
-	{"commented out, misplaced close", `{{</* sc1 >}}*/`, []item{
+	{"commented out, misplaced close", `{{</* sc1 >}}*/`, []Item{
 		{tError, 0, "comment must be closed"}}},
 }

@@ -178,7 +178,7 @@ func BenchmarkShortcodeLexer(b *testing.B) {
 	}
 }

-func collect(t *shortCodeLexerTest) (items []item) {
+func collect(t *shortCodeLexerTest) (items []Item) {
 	l := newShortcodeLexer(t.name, t.input, 0)
 	for {
 		item := l.nextItem()
@@ -191,7 +191,7 @@ func collect(t *shortCodeLexerTest) (items []item) {
 }

 // no positional checking, for now ...
-func equal(i1, i2 []item) bool {
+func equal(i1, i2 []Item) bool {
 	if len(i1) != len(i2) {
 		return false
 	}
@@ -199,7 +199,7 @@ func equal(i1, i2 []item) bool {
 		if i1[k].typ != i2[k].typ {
 			return false
 		}
-		if i1[k].val != i2[k].val {
+		if i1[k].Val != i2[k].Val {
 			return false
 		}
 	}
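The test driver that walks shortCodeLexerTests is not part of this diff. A sketch of how collect and equal are meant to be combined in a table-driven test (the function name and error message here are illustrative, not taken from the repository):

package pageparser

import "testing"

func TestShortcodeLexerSketch(t *testing.T) {
	for i, test := range shortCodeLexerTests {
		test := test // copy so collect gets a stable pointer
		items := collect(&test)
		if !equal(items, test.items) {
			t.Errorf("[%d] %s: got %v, expected %v", i, test.name, items, test.items)
		}
	}
}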