Remove the goroutines from the shortcode lexer
It was clever, but storing the items in a slice is faster -- and it gives room to more goroutines in other places.

```bash
benchmark                    old ns/op    new ns/op    delta
BenchmarkShortcodeLexer-4    180173       79614        -55.81%

benchmark                    old allocs   new allocs   delta
BenchmarkShortcodeLexer-4    309          328          +6.15%

benchmark                    old bytes    new bytes    delta
BenchmarkShortcodeLexer-4    35456        47008        +32.58%
```
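As a minimal, self-contained sketch of the two delivery strategies (the types and functions here are illustrative stand-ins, not the actual `pagelexer` code changed below), the channel version pays for a goroutine plus a synchronized send and receive per item, while the slice version only appends and later pops from the front:

```go
package main

import "fmt"

// item is a stand-in for the lexer's token type.
type item struct {
	typ int
	val string
}

// lexWithChannel delivers items through a channel fed by a separate
// goroutine; every item costs a send/receive synchronization.
func lexWithChannel(tokens []string) <-chan item {
	ch := make(chan item)
	go func() {
		for i, v := range tokens {
			ch <- item{typ: i, val: v}
		}
		close(ch)
	}()
	return ch
}

// lexWithSlice runs the whole lexer up front and stores the items in a
// slice; the consumer pops them off the front with no synchronization.
func lexWithSlice(tokens []string) []item {
	items := make([]item, 0, 5)
	for i, v := range tokens {
		items = append(items, item{typ: i, val: v})
	}
	return items
}

func main() {
	tokens := []string{"{{<", "inner", ">}}"}

	for it := range lexWithChannel(tokens) {
		fmt.Println("channel:", it.val)
	}

	items := lexWithSlice(tokens)
	for len(items) > 0 {
		it := items[0]
		items = items[1:]
		fmt.Println("slice:", it.val)
	}
}
```

The slice version trades a little extra allocation (the backing array can grow past its initial capacity) for the removal of per-item synchronization, which matches the benchmark figures above: far fewer ns/op at the cost of slightly more allocations and bytes.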
This commit is contained in:
parent 3153526161
commit 243694102a
1 changed file with 8 additions and 9 deletions
```diff
@@ -159,7 +159,7 @@ type pagelexer struct {
 	openShortcodes map[string]bool // set of shortcodes in open state
 
 	// items delivered to client
-	items chan item
+	items []item
 }
 
 // note: the input position here is normally 0 (start), but
@@ -172,9 +172,9 @@ func newShortcodeLexer(name, input string, inputPosition pos) *pagelexer {
 		currRightDelimItem: tRightDelimScNoMarkup,
 		pos:                inputPosition,
 		openShortcodes:     make(map[string]bool),
-		items:              make(chan item),
+		items:              make([]item, 0, 5),
 	}
-	go lexer.runShortcodeLexer()
+	lexer.runShortcodeLexer()
 	return lexer
 }
 
@@ -184,8 +184,6 @@ func (l *pagelexer) runShortcodeLexer() {
 	for l.state = lexTextOutsideShortcodes; l.state != nil; {
 		l.state = l.state(l)
 	}
-
-	close(l.items)
 }
 
 // state functions
@@ -227,7 +225,7 @@ func (l *pagelexer) backup() {
 
 // sends an item back to the client.
 func (l *pagelexer) emit(t itemType) {
-	l.items <- item{t, l.start, l.input[l.start:l.pos]}
+	l.items = append(l.items, item{t, l.start, l.input[l.start:l.pos]})
 	l.start = l.pos
 }
 
@@ -239,7 +237,7 @@ func (l *pagelexer) ignoreEscapesAndEmit(t itemType) {
 		}
 		return r
 	}, l.input[l.start:l.pos])
-	l.items <- item{t, l.start, val}
+	l.items = append(l.items, item{t, l.start, val})
 	l.start = l.pos
 }
 
@@ -260,13 +258,14 @@ func (l *pagelexer) lineNum() int {
 
 // nil terminates the parser
 func (l *pagelexer) errorf(format string, args ...interface{}) stateFunc {
-	l.items <- item{tError, l.start, fmt.Sprintf(format, args...)}
+	l.items = append(l.items, item{tError, l.start, fmt.Sprintf(format, args...)})
 	return nil
 }
 
 // consumes and returns the next item
 func (l *pagelexer) nextItem() item {
-	item := <-l.items
+	item := l.items[0]
+	l.items = l.items[1:]
 	l.lastPos = item.pos
 	return item
 }
```
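The figures in the commit message appear to come from Go's standard benchmark tooling; a self-contained micro-benchmark in the same spirit (again with stand-in types, not Hugo's actual `BenchmarkShortcodeLexer`) could compare the two delivery strategies directly:

```go
package lexdelivery

import "testing"

type item struct {
	typ int
	val string
}

var tokens = []string{"{{<", "inner", ">}}", "some text", "{{%", "inner", "%}}"}

// BenchmarkChannelDelivery measures the goroutine-plus-channel pattern
// that the commit removes.
func BenchmarkChannelDelivery(b *testing.B) {
	for i := 0; i < b.N; i++ {
		ch := make(chan item)
		go func() {
			for j, v := range tokens {
				ch <- item{typ: j, val: v}
			}
			close(ch)
		}()
		for range ch {
		}
	}
}

// BenchmarkSliceDelivery measures the append-then-pop pattern that the
// commit introduces.
func BenchmarkSliceDelivery(b *testing.B) {
	for i := 0; i < b.N; i++ {
		items := make([]item, 0, 5)
		for j, v := range tokens {
			items = append(items, item{typ: j, val: v})
		}
		for len(items) > 0 {
			items = items[1:]
		}
	}
}
```

Running such a file with `go test -bench . -benchmem` produces per-op timings and allocation counts; an old/new table in the format shown in the commit message is what comparison tools such as benchcmp print from two such runs.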