ref: 2f026ab3f3866852794c4c9ceec834b54b0e0921
parent: 971d1baf1393c07081607bd62a87da545d9cb34e
author: Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
date: Thu Dec 1 05:21:49 EST 2016
hugolib: Make RawContent raw again

This was a regression introduced in Hugo 0.17.

Fixes #2601
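For context, a minimal, self-contained sketch of the pattern the patch below introduces. The names are simplified stand-ins (the real fields live on Page in hugolib/page.go, and INTERNAL_DIVIDER stands in for internalSummaryDivider): the bytes read from the content file stay untouched in rawContent, which backs .RawContent, while every mutating step (shortcodes, emoji, summary divider, markdown rendering) operates on a workContent copy created just before conversion.

package main

import (
	"bytes"
	"fmt"
)

// page is a simplified stand-in for hugolib.Page, reduced to the two fields
// this commit is about.
type page struct {
	rawContent  []byte // exactly what was read from the content file; backs .RawContent
	workContent []byte // mutable copy used by converters and renderers
}

// createWorkContentCopy mirrors the helper added in this patch: copy the raw
// bytes once, then mutate only the copy.
func (p *page) createWorkContentCopy() {
	p.workContent = make([]byte, len(p.rawContent))
	copy(p.workContent, p.rawContent)
}

func main() {
	p := &page{rawContent: []byte("**Raw** <!--more--> and more")}
	p.createWorkContentCopy()

	// Stand-in for the real pipeline step that swaps the summary divider for
	// an internal marker; only the work copy changes.
	p.workContent = bytes.Replace(p.workContent, []byte("<!--more-->"), []byte("INTERNAL_DIVIDER"), 1)

	fmt.Printf("raw:  %s\n", p.rawContent)  // unchanged, as .RawContent should be
	fmt.Printf("work: %s\n", p.workContent) // carries the internal divider
}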
--- a/hugolib/handler_page.go
+++ b/hugolib/handler_page.go
@@ -72,6 +72,9 @@
panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
}
+ // Work on a copy of the raw content from now on.
+ p.createWorkContentCopy()
+
p.ProcessShortcodes(t)
return HandledResult{err: nil}
@@ -109,19 +112,22 @@
panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
}
+ // Work on a copy of the raw content from now on.
+ p.createWorkContentCopy()
+
p.ProcessShortcodes(t)
// TODO(bep) these page handlers need to be re-evaluated, as it is hard to
// process a page in isolation. See the new preRender func.
if viper.GetBool("enableEmoji") {
- p.rawContent = helpers.Emojify(p.rawContent)
+ p.workContent = helpers.Emojify(p.workContent)
}
// We have to replace the <!--more--> with something that survives all the
// rendering engines.
// TODO(bep) inline replace
- p.rawContent = bytes.Replace(p.rawContent, []byte(helpers.SummaryDivider), internalSummaryDivider, 1)
- p.rawContent = p.renderContent(p.rawContent)
+ p.workContent = bytes.Replace(p.workContent, []byte(helpers.SummaryDivider), internalSummaryDivider, 1)
+ p.workContent = p.renderContent(p.workContent)
return HandledResult{err: nil}
}
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -368,23 +368,23 @@
// If in watch mode, we need to keep the original so we can
// repeat this process on rebuild.
- var rawContentCopy []byte
+ var workContentCopy []byte
if cfg.Watching {
- rawContentCopy = make([]byte, len(p.rawContent))
- copy(rawContentCopy, p.rawContent)
+ workContentCopy = make([]byte, len(p.workContent))
+ copy(workContentCopy, p.workContent)
} else {
// Just reuse the same slice.
- rawContentCopy = p.rawContent
+ workContentCopy = p.workContent
}
if p.Markup == "markdown" {- tmpContent, tmpTableOfContents := helpers.ExtractTOC(rawContentCopy)
+ tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
- rawContentCopy = tmpContent
+ workContentCopy = tmpContent
}
var err error
- if rawContentCopy, err = handleShortcodes(p, s.owner.tmpl, rawContentCopy); err != nil {
+ if workContentCopy, err = handleShortcodes(p, s.owner.tmpl, workContentCopy); err != nil {
jww.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
}
@@ -391,15 +391,15 @@
if p.Markup != "html" {// Now we know enough to create a summary of the page and count some words
- summaryContent, err := p.setUserDefinedSummaryIfProvided(rawContentCopy)
+ summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)
if err != nil {
jww.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
} else if summaryContent != nil {
- rawContentCopy = summaryContent.content
+ workContentCopy = summaryContent.content
}
- p.Content = helpers.BytesToHTML(rawContentCopy)
+ p.Content = helpers.BytesToHTML(workContentCopy)
if summaryContent == nil {
p.setAutoSummary()
@@ -406,11 +406,11 @@
}
} else {
- p.Content = helpers.BytesToHTML(rawContentCopy)
+ p.Content = helpers.BytesToHTML(workContentCopy)
}
// no need for this anymore
- rawContentCopy = nil
+ workContentCopy = nil
//analyze for raw stats
p.analyzePage()
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -560,7 +560,7 @@
for _, p := range s.rawAllPages {
// No HTML when not processed
- require.Equal(t, p.shouldBuild(), bytes.Contains(p.rawContent, []byte("</")), p.BaseFileName()+": "+string(p.rawContent))
+ require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("</")), p.BaseFileName()+": "+string(p.workContent))
require.Equal(t, p.shouldBuild(), p.Content != "", p.BaseFileName())
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -129,12 +129,12 @@
frontmatter []byte
- // rawContent isn't "raw" as in the same as in the content file.
- // Hugo cares about memory consumption, so we make changes to it to do
- // markdown rendering etc., but it is "raw enough" so we can do rebuilds
- // when shortcode changes etc.
+ // rawContent is the raw content read from the content file.
rawContent []byte
+ // workContent is a copy of rawContent that may be mutated during site build.
+ workContent []byte
+
// state telling if this is a "new page" or if we have rendered it previously.
rendered bool
@@ -292,6 +292,11 @@
return -1
}
+func (p *Page) createWorkContentCopy() {
+ p.workContent = make([]byte, len(p.rawContent))
+ copy(p.workContent, p.rawContent)
+}
+
func (p *Page) Plain() string {
p.initPlain()
return p.plain
@@ -1389,8 +1394,8 @@
}
func (p *Page) ProcessShortcodes(t tpl.Template) {
- tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.rawContent), p, t)
- p.rawContent = []byte(tmpContent)
+ tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.workContent), p, t)
+ p.workContent = []byte(tmpContent)
p.contentShortCodes = tmpContentShortCodes
}
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -753,6 +753,26 @@
}
}
+// Issue #2601
+func TestPageRawContent(t *testing.T) {
+ s := newSiteFromSources("raw.md", `---
+title: Raw
+---
+**Raw**`)
+
+ writeSource(t, filepath.Join("layouts", "_default", "single.html"), `{{ .RawContent }}`)
+
+ if err := buildSiteSkipRender(s); err != nil {
+ t.Fatalf("Failed to build site: %s", err)
+ }
+
+ require.Len(t, s.RegularPages, 1)
+ p := s.RegularPages[0]
+
+ require.Contains(t, p.RawContent(), "**Raw**")
+
+}
+
func TestPageWithShortCodeInSummary(t *testing.T) {
assertFunc := func(t *testing.T, ext string, pages Pages) {
--