This commit adds the ContentReWriter interface to the transformer chain.
This is backed by two pooled byte buffers, alternating between being the reader or the writer.
This keeps the performance characteristics of the old implementation, but in a thread-safe way.
Fixes #911
Benchmark old vs new:
benchmark old ns/op new ns/op delta
BenchmarkAbsURL 17614 17384 -1.31%
BenchmarkXMLAbsURL 9431 9248 -1.94%
benchmark old allocs new allocs delta
BenchmarkAbsURL 24 28 +16.67%
BenchmarkXMLAbsURL 12 14 +16.67%
benchmark old bytes new bytes delta
BenchmarkAbsURL 3295 3424 +3.92%
BenchmarkXMLAbsURL 1954 1987 +1.69%
func AbsURL(absURL string) (trs []link, err error) {
initAbsURLReplacer(absURL)
- trs = append(trs, func(content []byte) []byte {
- return ar.replaceInHTML(content)
+ trs = append(trs, func(rw ContentReWriter) {
+ ar.replaceInHTML(rw)
})
return
}
func AbsURLInXML(absURL string) (trs []link, err error) {
initAbsURLReplacer(absURL)
- trs = append(trs, func(content []byte) []byte {
- return ar.replaceInXML(content)
+ trs = append(trs, func(rw ContentReWriter) {
+ ar.replaceInXML(rw)
})
return
}
import (
"bytes"
- bp "github.com/spf13/hugo/bufferpool"
+ "io"
"net/url"
"strings"
"unicode/utf8"
state stateFunc
prefixLookup *prefixes
- b *bytes.Buffer
+ w io.Writer
}
// stateFunc represents one state of the content lexer; it returns the next state to run.
type stateFunc func(*contentlexer) stateFunc
}
// emit flushes the pending chunk, content[start:pos], to the output
// writer and advances the start marker to the current position.
func (l *contentlexer) emit() {
	l.w.Write(l.content[l.start:l.pos])
	l.start = l.pos
}
l.emit()
}
l.pos += len(m.match)
- l.b.Write(m.replacement)
+ l.w.Write(m.replacement)
l.start = l.pos
return
}
l.width = width
l.pos += l.width
-
if r == ' ' {
l.prefixLookup.ms = matchStateWhitespace
} else if l.prefixLookup.ms != matchStateNone {
}
}
-func doReplace(content []byte, matchers []absURLMatcher) []byte {
- b := bp.GetBuffer()
- defer bp.PutBuffer(b)
+func doReplace(rw ContentReWriter, matchers []absURLMatcher) {
- lexer := &contentlexer{content: content,
- b: b,
+ lexer := &contentlexer{
+ content: rw.Content(),
+ w: rw,
prefixLookup: &prefixes{pr: mainPrefixRunes},
matchers: matchers}
lexer.replace()
- return b.Bytes()
}
type absURLReplacer struct {
}
-func (au *absURLReplacer) replaceInHTML(content []byte) []byte {
- return doReplace(content, au.htmlMatchers)
+func (au *absURLReplacer) replaceInHTML(rw ContentReWriter) {
+ doReplace(rw, au.htmlMatchers)
}
-func (au *absURLReplacer) replaceInXML(content []byte) []byte {
- return doReplace(content, au.xmlMatchers)
+func (au *absURLReplacer) replaceInXML(rw ContentReWriter) {
+ doReplace(rw, au.xmlMatchers)
}
package transform
import (
- "io"
-
+ "bytes"
bp "github.com/spf13/hugo/bufferpool"
+ "io"
)
-type trans func([]byte) []byte
+type trans func(rw ContentReWriter)
type link trans
return make([]link, 0, 20)
}
-func (c *chain) Apply(w io.Writer, r io.Reader) (err error) {
- buffer := bp.GetBuffer()
- defer bp.PutBuffer(buffer)
+// ContentReWriter is an interface that enables rotation
+// of pooled buffers in the transformer chain.
+type ContentReWriter interface {
+ Content() []byte
+ io.Writer
+}
+
+// Implements ContentReWriter
+// Content is read from the from-buffer,
+// and rewritten to to the to-buffer.
+type fromToBuffer struct {
+ from *bytes.Buffer
+ to *bytes.Buffer
+}
+
+func (ft fromToBuffer) Write(p []byte) (n int, err error) {
+ return ft.to.Write(p)
+}
+
+func (ft fromToBuffer) Content() []byte {
+ return ft.from.Bytes()
+}
- buffer.ReadFrom(r)
- b := buffer.Bytes()
- for _, tr := range *c {
- b = tr(b)
+func (c *chain) Apply(w io.Writer, r io.Reader) error {
+
+ b1 := bp.GetBuffer()
+ defer bp.PutBuffer(b1)
+
+ b1.ReadFrom(r)
+
+ if len(*c) == 0 {
+ b1.WriteTo(w)
+ return nil
}
- buffer.Reset()
- buffer.Write(b)
- buffer.WriteTo(w)
- return
+
+ b2 := bp.GetBuffer()
+ defer bp.PutBuffer(b2)
+
+ fb := &fromToBuffer{from: b1, to: b2}
+
+ for i, tr := range *c {
+ if i > 0 {
+ if fb.from == b1 {
+ fb.from = b2
+ fb.to = b1
+ fb.to.Reset()
+ } else {
+ fb.from = b1
+ fb.to = b2
+ fb.to.Reset()
+ }
+ }
+
+ tr(fb)
+ }
+
+ fb.to.WriteTo(w)
+ return nil
}
import (
"bytes"
+ "github.com/spf13/hugo/helpers"
"strings"
"testing"
)
}
}
+func TestChaingMultipleTransformers(t *testing.T) {
+ f1 := func(rw ContentReWriter) {
+ rw.Write(bytes.Replace(rw.Content(), []byte("f1"), []byte("f1r"), -1))
+ }
+ f2 := func(rw ContentReWriter) {
+ rw.Write(bytes.Replace(rw.Content(), []byte("f2"), []byte("f2r"), -1))
+ }
+ f3 := func(rw ContentReWriter) {
+ rw.Write(bytes.Replace(rw.Content(), []byte("f3"), []byte("f3r"), -1))
+ }
+
+ f4 := func(rw ContentReWriter) {
+ rw.Write(bytes.Replace(rw.Content(), []byte("f4"), []byte("f4r"), -1))
+ }
+
+ tr := NewChain(f1, f2, f3, f4)
+
+ out := new(bytes.Buffer)
+ if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End.")); err != nil {
+ t.Errorf("Multi transformer chain returned an error: %s", err)
+ }
+
+ expected := "Test: f4r f3r f1r f2r f1r The End."
+
+ if string(out.Bytes()) != expected {
+ t.Errorf("Expected %s got %s", expected, string(out.Bytes()))
+ }
+}
+
func BenchmarkAbsURL(b *testing.B) {
absURL, _ := AbsURL("http://base")
tr := NewChain(absURL...)
import (
"bytes"
- jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
)
-func LiveReloadInject(content []byte) (injected []byte) {
- defer func() {
- if r := recover(); r != nil {
- jww.ERROR.Println("Recovered in LiveReloadInject", r)
- injected = content
- }
- }()
+func LiveReloadInject(rw ContentReWriter) {
match := []byte("</body>")
port := viper.GetString("port")
replace := []byte(`<script>document.write('<script src="http://'
+ (location.host || 'localhost').split(':')[0]
+ ':` + port + `/livereload.js?mindelay=10"></'
+ 'script>')</script></body>`)
- newcontent := bytes.Replace(content, match, replace, -1)
+ newcontent := bytes.Replace(rw.Content(), match, replace, -1)
- if len(newcontent) == len(content) {
+ if len(newcontent) == len(rw.Content()) {
match := []byte("</BODY>")
- newcontent = bytes.Replace(content, match, replace, -1)
+ newcontent = bytes.Replace(rw.Content(), match, replace, -1)
}
- return newcontent
+ rw.Write(newcontent)
}