ref: c8d3124ddeb86093caf1b18dfaa328c3053e5b63
parent: 9347084d61a91c73bba1e04790b029163c38bacf
author: Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
date: Sun Nov 13 09:27:10 EST 2016
node to page: Remove Node

And misc. TODO-fixes

Updates #2297
--- a/hugolib/embedded_shortcodes_test.go
+++ b/hugolib/embedded_shortcodes_test.go
@@ -66,9 +66,9 @@
require.NoError(t, err)
require.NoError(t, sites.Build(BuildCfg{}))
- require.Len(t, sites.Sites[0].regularPages, 1)
+ require.Len(t, sites.Sites[0].RegularPages, 1)
- output := string(sites.Sites[0].regularPages[0].Content)
+ output := string(sites.Sites[0].RegularPages[0].Content)
if !strings.Contains(output, expected) {
t.Errorf("Got\n%q\nExpected\n%q", output, expected)
}
--- a/hugolib/gitinfo.go
+++ b/hugolib/gitinfo.go
@@ -52,7 +52,10 @@
s := h.Sites[0]
for _, p := range s.AllPages {
- // TODO(bep) np consider other nodes
+ if p.Path() == "" {
+ // Home page etc. with no content file.
+ continue
+ }
// Git normalizes file paths on this form:
filename := path.Join(contentRoot, contentDir, filepath.ToSlash(p.Path()))
g, ok := gitMap[filename]
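The guard added above matters because the map built from the Git log is keyed by content file paths, while synthetic pages such as the home page have no source file: their Path() is empty, so the path.Join lookup could never match. A minimal sketch of that lookup, using simplified stand-in types (page, gitMap) rather than Hugo's real ones:

    package main

    import (
        "fmt"
        "path"
    )

    // page is a simplified stand-in for hugolib's Page.
    type page struct{ path string } // empty path => no content file behind it

    func (p page) Path() string { return p.path }

    func main() {
        // gitMap mimics the filename -> revision map built from the Git log.
        gitMap := map[string]string{"content/post/first.md": "abc1234"}

        pages := []page{{"post/first.md"}, {""}} // the second is home-page-like

        for _, p := range pages {
            if p.Path() == "" {
                // Home page etc. with no content file: nothing to look up.
                continue
            }
            filename := path.Join("content", p.Path())
            if g, ok := gitMap[filename]; ok {
                fmt.Println(p.Path(), "=>", g)
            }
        }
    }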
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -211,8 +211,6 @@
// createMissingPages creates home page, taxonomies etc. that isn't created as an
// effect of having a content file.
func (h *HugoSites) createMissingPages() error {
- // TODO(bep) np check node title etc.
-
var newPages Pages
for _, s := range h.Sites {
@@ -306,12 +304,11 @@
// Move the new* methods after cleanup in site.go
func (s *Site) newNodePage(typ string) *Page {
return &Page{
- Kind: typ,
- Node: Node{
- Data: make(map[string]interface{}),
- Site: &s.Info,
- language: s.Language,
- },
- site: s}
+ Kind: typ,
+ Data: make(map[string]interface{}),
+ Site: &s.Info,
+ language: s.Language,
+ site: s}
}
func (s *Site) newHomePage() *Page {
@@ -321,8 +318,6 @@
p.Data["Pages"] = pages
p.Pages = pages
s.setPageURLs(p, "/")
- // TODO(bep) np check Data pages
- // TODO(bep) np check setURLs
return p
}
@@ -426,23 +421,7 @@
}
}
-// preRender performs build tasks that need to be done as late as possible.
-// Shortcode handling is the main task in here.
-// TODO(bep) We need to look at the whole handler-chain construct with he below in mind.
-// TODO(bep) np clean
-func (h *HugoSites) preRender(cfg BuildCfg, changed whatChanged) error {
-
- for _, s := range h.Sites {
- if err := s.setCurrentLanguageConfig(); err != nil {
- return err
- }
- s.preparePagesForRender(cfg, changed)
- }
-
- return nil
-}
-
-func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
+func (s *Site) preparePagesForRender(cfg *BuildCfg) {
pageChan := make(chan *Page)
wg := &sync.WaitGroup{}
@@ -452,7 +431,7 @@
defer wg.Done()
for p := range pages {
- if !changed.other && p.rendered {
+ if !cfg.whatChanged.other && p.rendered {
// No need to process it again.
continue
}
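For context, preparePagesForRender (only partially visible in the hunk above) fans pages out over pageChan to a set of workers guarded by the sync.WaitGroup, and the new cfg.whatChanged.other test lets already-rendered, unchanged pages be skipped on rebuilds. A stripped-down sketch of that worker pattern, with the per-page work reduced to a placeholder:

    package main

    import (
        "fmt"
        "sync"
    )

    type page struct {
        title    string
        rendered bool
    }

    func main() {
        pageChan := make(chan *page)
        wg := &sync.WaitGroup{}
        otherChanged := false // stand-in for cfg.whatChanged.other

        // Start a small, fixed number of render-prep workers.
        for i := 0; i < 4; i++ {
            wg.Add(1)
            go func(pages <-chan *page) {
                defer wg.Done()
                for p := range pages {
                    if !otherChanged && p.rendered {
                        // No need to process it again.
                        continue
                    }
                    // Placeholder for shortcode handling / content conversion.
                    fmt.Println("prepared:", p.title)
                }
            }(pageChan)
        }

        for _, p := range []*page{{title: "a"}, {title: "b", rendered: true}} {
            pageChan <- p
        }
        close(pageChan)
        wg.Wait()
    }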
--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -169,11 +169,15 @@
return err
}
- if err := h.preRender(*config, whatChanged{source: true, other: true}); err != nil {
- return err
+ for _, s := range h.Sites {
+ if err := s.setCurrentLanguageConfig(); err != nil {
+ return err
+ }
+ s.preparePagesForRender(config)
}
return nil
+
}
func (h *HugoSites) render(config *BuildCfg) error {
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -83,8 +83,8 @@
require.Equal(t, "/blog/en/foo", enSite.Info.pathSpec.RelURL("foo", true))- doc1en := enSite.regularPages[0]
- doc1fr := frSite.regularPages[0]
+ doc1en := enSite.RegularPages[0]
+ doc1fr := frSite.RegularPages[0]
enPerm, _ := doc1en.Permalink()
enRelPerm, _ := doc1en.RelPermalink()
@@ -216,24 +216,24 @@
assert.Equal(t, "en", enSite.Language.Lang)
- if len(enSite.regularPages) != 4 {
+ if len(enSite.RegularPages) != 4 {
t.Fatal("Expected 4 english pages")
}
assert.Len(t, enSite.Source.Files(), 14, "should have 14 source files")
assert.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)")
- doc1en := enSite.regularPages[0]
+ doc1en := enSite.RegularPages[0]
permalink, err := doc1en.Permalink()
assert.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink")
assert.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
- doc2 := enSite.regularPages[1]
+ doc2 := enSite.RegularPages[1]
permalink, err = doc2.Permalink()
assert.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink")
- doc3 := enSite.regularPages[2]
+ doc3 := enSite.RegularPages[2]
permalink, err = doc3.Permalink()
assert.NoError(t, err, "permalink call failed")
// Note that /superbob is a custom URL set in frontmatter.
@@ -276,10 +276,10 @@
frSite := sites.Sites[1]
assert.Equal(t, "fr", frSite.Language.Lang)
- assert.Len(t, frSite.regularPages, 3, "should have 3 pages")
+ assert.Len(t, frSite.RegularPages, 3, "should have 3 pages")
assert.Len(t, frSite.AllPages, 28, "should have 28 total pages (including translations and nodes)")
- for _, frenchPage := range frSite.regularPages {
+ for _, frenchPage := range frSite.RegularPages {
assert.Equal(t, "fr", frenchPage.Lang())
}
@@ -386,8 +386,8 @@
enSite := sites.Sites[0]
frSite := sites.Sites[1]
- require.Len(t, enSite.regularPages, 4)
- require.Len(t, frSite.regularPages, 3)
+ require.Len(t, enSite.RegularPages, 4)
+ require.Len(t, frSite.RegularPages, 3)
// Verify translations
assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Hello")
@@ -413,7 +413,7 @@
nil,
[]fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}}, func(t *testing.T) {- require.Len(t, enSite.regularPages, 3, "1 en removed")
+ require.Len(t, enSite.RegularPages, 3, "1 en removed")
// Check build stats
require.Equal(t, 1, enSite.draftCount, "Draft")
@@ -436,12 +436,12 @@
{Name: "content/new1.fr.md", Op: fsnotify.Create},},
func(t *testing.T) {- require.Len(t, enSite.regularPages, 5)
+ require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30)
- require.Len(t, frSite.regularPages, 4)
- require.Equal(t, "new_fr_1", frSite.regularPages[3].Title)
- require.Equal(t, "new_en_2", enSite.regularPages[0].Title)
- require.Equal(t, "new_en_1", enSite.regularPages[1].Title)
+ require.Len(t, frSite.RegularPages, 4)
+ require.Equal(t, "new_fr_1", frSite.RegularPages[3].Title)
+ require.Equal(t, "new_en_2", enSite.RegularPages[0].Title)
+ require.Equal(t, "new_en_1", enSite.RegularPages[1].Title)
rendered := readDestination(t, "public/en/new1/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
@@ -456,7 +456,7 @@
},
[]fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}}, func(t *testing.T) {- require.Len(t, enSite.regularPages, 5)
+ require.Len(t, enSite.RegularPages, 5)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "CHANGED"), doc1)
@@ -474,8 +474,8 @@
{Name: "content/new1.en.md", Op: fsnotify.Rename},},
func(t *testing.T) {- require.Len(t, enSite.regularPages, 5, "Rename")
- require.Equal(t, "new_en_1", enSite.regularPages[1].Title)
+ require.Len(t, enSite.RegularPages, 5, "Rename")
+ require.Equal(t, "new_en_1", enSite.RegularPages[1].Title)
rendered := readDestination(t, "public/en/new1renamed/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
}},
@@ -489,9 +489,9 @@
},
[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}}, func(t *testing.T) {- require.Len(t, enSite.regularPages, 5)
+ require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30)
- require.Len(t, frSite.regularPages, 4)
+ require.Len(t, frSite.RegularPages, 4)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "Template Changed"), doc1)
},
@@ -506,9 +506,9 @@
},
[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}}, func(t *testing.T) {- require.Len(t, enSite.regularPages, 5)
+ require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30)
- require.Len(t, frSite.regularPages, 4)
+ require.Len(t, frSite.RegularPages, 4)
docEn := readDestination(t, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, "public/fr/sect/doc1/index.html")
@@ -530,9 +530,9 @@
{Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write},},
func(t *testing.T) {- require.Len(t, enSite.regularPages, 5)
+ require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30)
- require.Len(t, frSite.regularPages, 4)
+ require.Len(t, frSite.RegularPages, 4)
assertFileContent(t, "public/fr/sect/doc1/index.html", true, "Single", "Modified Shortcode: Salut")
assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Single", "Modified Shortcode: Hello")
},
@@ -626,12 +626,12 @@
require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang())
- require.Len(t, enSite.regularPages, 4)
- require.Len(t, frSite.regularPages, 3)
+ require.Len(t, enSite.RegularPages, 4)
+ require.Len(t, frSite.RegularPages, 3)
// Verify Swedish site
- require.Len(t, svSite.regularPages, 1)
- svPage := svSite.regularPages[0]
+ require.Len(t, svSite.RegularPages, 1)
+ svPage := svSite.RegularPages[0]
require.Equal(t, "Swedish Contentfile", svPage.Title)
require.Equal(t, "sv", svPage.Lang())
require.Len(t, svPage.Translations(), 2)
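The s/regularPages/RegularPages/ churn in these tests follows from the field being exported in PageCollections (see page_collections.go further down). Once exported, the collection is reachable outside the struct's own methods as well; a tiny illustration with stand-in types, not Hugo's real ones:

    package main

    import "fmt"

    // Simplified stand-ins for hugolib's Page and page collections.
    type Page struct{ Title string }

    type Site struct {
        RegularPages []*Page // exported by this commit; previously regularPages
    }

    func main() {
        s := Site{RegularPages: []*Page{{Title: "First Post"}, {Title: "Second Post"}}}
        for _, p := range s.RegularPages {
            fmt.Println(p.Title)
        }
    }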
--- a/hugolib/menu.go
+++ b/hugolib/menu.go
@@ -21,8 +21,6 @@
"github.com/spf13/cast"
)
-// TODO(bep) np menu entries in section content etc.?
-
// MenuEntry represents a menu item defined in either Page front matter
// or in the site config.
type MenuEntry struct {
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -208,7 +208,7 @@
s := setupMenuTests(t, menuPageSources)
- assert.Equal(t, 3, len(s.regularPages), "Not enough pages")
+ assert.Equal(t, 3, len(s.RegularPages), "Not enough pages")
me1 := findTestMenuEntryByID(s, "m1", "i1")
me2 := findTestMenuEntryByID(s, "m1", "i2")
@@ -246,7 +246,7 @@
s := setupMenuTests(t, menuPageSources)
- assert.Equal(t, 3, len(s.regularPages), "Not enough pages")
+ assert.Equal(t, 3, len(s.RegularPages), "Not enough pages")
me1 := findTestMenuEntryByName(s, "m1", "n1")
me2 := findTestMenuEntryByName(s, "m1", "n2")
@@ -264,13 +264,13 @@
s := setupMenuTests(t, menuPageSources)
- if len(s.regularPages) != 3 {
- t.Fatalf("Posts not created, expected 3 got %d", len(s.regularPages))
+ if len(s.RegularPages) != 3 {
+ t.Fatalf("Posts not created, expected 3 got %d", len(s.RegularPages))
}
- first := s.regularPages[0]
- second := s.regularPages[1]
- third := s.regularPages[2]
+ first := s.RegularPages[0]
+ second := s.RegularPages[1]
+ third := s.RegularPages[2]
pOne := findTestMenuEntryByName(s, "p_one", "One")
pTwo := findTestMenuEntryByID(s, "p_two", "Two")
@@ -290,6 +290,10 @@
{"p_one", third, pTwo, false, false}, } {+ if i != 4 {+ continue
+ }
+
isMenuCurrent := this.page.IsMenuCurrent(this.menu, this.menuItem)
hasMenuCurrent := this.page.HasMenuCurrent(this.menu, this.menuItem)
@@ -358,9 +362,9 @@
{Name: filepath.FromSlash("sect/yaml1.md"), Content: ps1}, {Name: filepath.FromSlash("sect/yaml2.md"), Content: ps2}})- p1 := s.regularPages[0]
+ p1 := s.RegularPages[0]
assert.Len(t, p1.Menus(), 2, "List YAML")
- p2 := s.regularPages[1]
+ p2 := s.RegularPages[1]
assert.Len(t, p2.Menus(), 2, "Map YAML")
}
@@ -406,14 +410,14 @@
viper.Set("canonifyURLs", canonifyURLs)s := setupMenuTests(t, menuPageSectionsSources)
- assert.Equal(t, 3, len(s.Sections))
+ require.Equal(t, 3, len(s.Sections))
firstSectionPages := s.Sections["first"]
- assert.Equal(t, 2, len(firstSectionPages))
+ require.Equal(t, 2, len(firstSectionPages))
secondSectionPages := s.Sections["second-section"]
- assert.Equal(t, 1, len(secondSectionPages))
+ require.Equal(t, 1, len(secondSectionPages))
fishySectionPages := s.Sections["fish-and-chips"]
- assert.Equal(t, 1, len(fishySectionPages))
+ require.Equal(t, 1, len(fishySectionPages))
nodeFirst := s.getPage(KindSection, "first")
require.NotNil(t, nodeFirst)
@@ -426,33 +430,33 @@
secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
- assert.NotNil(t, firstSectionMenuEntry)
- assert.NotNil(t, secondSectionMenuEntry)
- assert.NotNil(t, nodeFirst)
- assert.NotNil(t, nodeSecond)
- assert.NotNil(t, fishySectionMenuEntry)
- assert.NotNil(t, nodeFishy)
+ require.NotNil(t, firstSectionMenuEntry)
+ require.NotNil(t, secondSectionMenuEntry)
+ require.NotNil(t, nodeFirst)
+ require.NotNil(t, nodeSecond)
+ require.NotNil(t, fishySectionMenuEntry)
+ require.NotNil(t, nodeFishy)
- assert.True(t, nodeFirst.IsMenuCurrent("spm", firstSectionMenuEntry))
- assert.False(t, nodeFirst.IsMenuCurrent("spm", secondSectionMenuEntry))
- assert.False(t, nodeFirst.IsMenuCurrent("spm", fishySectionMenuEntry))
- assert.True(t, nodeFishy.IsMenuCurrent("spm", fishySectionMenuEntry))
- assert.Equal(t, "Fish and Chips", fishySectionMenuEntry.Name)
+ require.True(t, nodeFirst.IsMenuCurrent("spm", firstSectionMenuEntry))
+ require.False(t, nodeFirst.IsMenuCurrent("spm", secondSectionMenuEntry))
+ require.False(t, nodeFirst.IsMenuCurrent("spm", fishySectionMenuEntry))
+ require.True(t, nodeFishy.IsMenuCurrent("spm", fishySectionMenuEntry))
+ require.Equal(t, "Fish and Chips", fishySectionMenuEntry.Name)

for _, p := range firstSectionPages {
- assert.True(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
- assert.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
+ require.True(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
+ require.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
}

for _, p := range secondSectionPages {
- assert.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
- assert.True(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
+ require.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
+ require.True(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
}

for _, p := range fishySectionPages {
- assert.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
- assert.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
- assert.True(t, p.Page.HasMenuCurrent("spm", fishySectionMenuEntry))
+ require.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
+ require.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
+ require.True(t, p.Page.HasMenuCurrent("spm", fishySectionMenuEntry))
}
}
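These assertions exercise the IsMenuCurrent/HasMenuCurrent pair that the commit consolidates onto Page: an entry is "current" when it matches the page itself, and a parent entry "has current" when any descendant matches. A minimal sketch of that recursive descent, assuming a much-simplified MenuEntry:

    package main

    import "fmt"

    type MenuEntry struct {
        URL      string
        Children []*MenuEntry
    }

    // hasCurrent reports whether entry or any descendant points at url,
    // mirroring the walk behind HasMenuCurrent.
    func hasCurrent(entry *MenuEntry, url string) bool {
        if entry.URL == url {
            return true
        }
        for _, child := range entry.Children {
            if hasCurrent(child, url) {
                return true
            }
        }
        return false
    }

    func main() {
        spm := &MenuEntry{URL: "/first/", Children: []*MenuEntry{{URL: "/first/post/"}}}
        fmt.Println(hasCurrent(spm, "/first/post/")) // true
        fmt.Println(hasCurrent(spm, "/second/"))     // false
    }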
--- a/hugolib/node.go
+++ /dev/null
@@ -1,334 +1,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
- "fmt"
- "html/template"
- "path"
- "path/filepath"
- "strings"
- "sync"
- "time"
-
- jww "github.com/spf13/jwalterweatherman"
-
- "github.com/spf13/hugo/helpers"
-)
-
-// TODO(bep) np clean up node vs page
-
-type Node struct {
- RSSLink template.HTML
- Site *SiteInfo `json:"-"`
- // layout string
- Data map[string]interface{}
- Title string
- Description string
- Keywords []string
- Params map[string]interface{}
- Date time.Time
- Lastmod time.Time
- Sitemap Sitemap
- URLPath
- paginator *Pager
- paginatorInit sync.Once
- scratch *Scratch
-
- language *helpers.Language
- languageInit sync.Once
- lang string
-}
-
-func (n *Node) Now() time.Time {
- return time.Now()
-}
-
-func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
- if inme.HasChildren() {
- me := MenuEntry{Name: n.Title, URL: n.URL()}
-
- for _, child := range inme.Children {
- if me.IsSameResource(child) {
- return true
- }
- if n.HasMenuCurrent(menuID, child) {
- return true
- }
- }
- }
-
- return false
-}
-
-func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
-
- me := MenuEntry{Name: n.Title, URL: n.Site.createNodeMenuEntryURL(n.URL())}
-
- if !me.IsSameResource(inme) {
- return false
- }
-
- // this resource may be included in several menus
- // search for it to make sure that it is in the menu with the given menuId
- if menu, ok := (*n.Site.Menus)[menuID]; ok {
- for _, menuEntry := range *menu {
- if menuEntry.IsSameResource(inme) {
- return true
- }
-
- descendantFound := n.isSameAsDescendantMenu(inme, menuEntry)
- if descendantFound {
- return descendantFound
- }
-
- }
- }
-
- return false
-}
-
-// Param is a convenience method to do lookups in Site's Params map.
-//
-// This method is also implemented on Page and SiteInfo.
-func (n *Node) Param(key interface{}) (interface{}, error) {
- return n.Site.Param(key)
-}
-
-func (n *Node) Hugo() *HugoInfo {
- return hugoInfo
-}
-
-func (n *Node) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
- if parent.HasChildren() {
- for _, child := range parent.Children {
- if child.IsSameResource(inme) {
- return true
- }
- descendantFound := n.isSameAsDescendantMenu(inme, child)
- if descendantFound {
- return descendantFound
- }
- }
- }
- return false
-}
-
-func (n *Node) RSSlink() template.HTML {
- return n.RSSLink
-}
-
-func (n *Node) Ref(ref string) (string, error) {
- return n.Site.Ref(ref, nil)
-}
-
-func (n *Node) RelRef(ref string) (string, error) {
- return n.Site.RelRef(ref, nil)
-}
-
-type URLPath struct {
- URL string
- Permalink string
- Slug string
- Section string
-}
-
-func (n *Node) URL() string {
- return n.addLangPathPrefix(n.URLPath.URL)
-}
-
-func (n *Node) Permalink() string {
- return n.Site.permalink(n.URL())
-}
-
-// Scratch returns the writable context associated with this Node.
-func (n *Node) Scratch() *Scratch {
- if n.scratch == nil {
- n.scratch = newScratch()
- }
- return n.scratch
-}
-
-func (n *Node) Language() *helpers.Language {
- n.initLanguage()
- return n.language
-}
-
-func (n *Node) Lang() string {
- // When set, Language can be different from lang in the case where there is a
- // content file (doc.sv.md) with language indicator, but there is no language
- // config for that language. Then the language will fall back on the site default.
- if n.Language() != nil {
- return n.Language().Lang
- }
- return n.lang
-}
-
-func (p *Page) isTranslation(candidate *Page) bool {
- if p == candidate || p.Kind != candidate.Kind {
- return false
- }
-
- if p.lang != candidate.lang || p.language != p.language {
- return false
- }
-
- if p.Kind == KindPage || p.Kind == kindUnknown {
- panic("Node type not currently supported for this op")
- }
-
- // At this point, we know that this is a traditional Node (hoe page, section, taxonomy)
- // It represents the same node, but different language, if the sections is the same.
- if len(p.sections) != len(candidate.sections) {
- return false
- }
-
- for i := 0; i < len(p.sections); i++ {
- if p.sections[i] != candidate.sections[i] {
- return false
- }
- }
-
- return true
-
-}
-
-func (n *Node) shouldAddLanguagePrefix() bool {
- if !n.Site.IsMultiLingual() {
- return false
- }
-
- if n.Lang() == "" {
- return false
- }
-
- if !n.Site.defaultContentLanguageInSubdir && n.Lang() == n.Site.multilingual.DefaultLang.Lang {
- return false
- }
-
- return true
-}
-
-func (n *Node) initLanguage() {
- n.languageInit.Do(func() {
- if n.language != nil {
- return
- }
- pageLang := n.lang
- ml := n.Site.multilingual
- if ml == nil {
- panic("Multilanguage not set")
- }
- if pageLang == "" {
- n.language = ml.DefaultLang
- return
- }
-
- language := ml.Language(pageLang)
-
- if language == nil {
- // It can be a file named stefano.chiodino.md.
- jww.WARN.Printf("Page language (if it is that) not found in multilang setup: %s.", pageLang)
- language = ml.DefaultLang
- }
-
- n.language = language
- })
-}
-
-func (n *Node) LanguagePrefix() string {
- return n.Site.LanguagePrefix
-}
-
-func (n *Node) addLangPathPrefix(outfile string) string {
- return n.addLangPathPrefixIfFlagSet(outfile, n.shouldAddLanguagePrefix())
-}
-
-func (n *Node) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
- if helpers.IsAbsURL(outfile) {
- return outfile
- }
-
- if !should {
- return outfile
- }
-
- hadSlashSuffix := strings.HasSuffix(outfile, "/")
-
- outfile = "/" + path.Join(n.Lang(), outfile)
- if hadSlashSuffix {
- outfile += "/"
- }
- return outfile
-}
-
-func (n *Node) addLangFilepathPrefix(outfile string) string {
- if outfile == "" {
- outfile = helpers.FilePathSeparator
- }
- if !n.shouldAddLanguagePrefix() {
- return outfile
- }
- return helpers.FilePathSeparator + filepath.Join(n.Lang(), outfile)
-}
-
-func sectionsFromFilename(filename string) []string {
- dir, _ := filepath.Split(filename)
- dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
- sections := strings.Split(dir, helpers.FilePathSeparator)
- return sections
-}
-
-// TODO(bep) np node identificator
-func kindFromFilename(filename string) string {
- if !strings.Contains(filename, "_index") {
- return KindPage
- }
-
- if strings.HasPrefix(filename, "_index") {
- return KindHome
- }
-
- // We don't know enough yet to determine the type.
- return kindUnknown
-}
-
-func (p *Page) setNodeTypeVars(s *Site) {
- // TODO(bep) np taxonomies etc.
- if p.Kind == kindUnknown {
- // This is either a taxonomy list, taxonomy term or a section
- nodeType := s.nodeTypeFromSections(p.sections)
-
- if nodeType == kindUnknown {
- panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
- }
-
- p.Kind = nodeType
- }
- // TODO(bep) np node URL
- // Set Node URL
- switch p.Kind {
- case KindHome:
- p.URLPath.URL = ""
- case KindSection:
- p.URLPath.URL = p.sections[0]
- case KindTaxonomy:
- p.URLPath.URL = path.Join(p.sections...)
- case KindTaxonomyTerm:
- p.URLPath.URL = path.Join(p.sections...)
- }
-
- p.site = s
-
-}
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -84,6 +84,7 @@
require.True(t, home.IsHome())
require.True(t, home.IsNode())
require.False(t, home.IsPage())
+ require.True(t, home.Path() != "")
section2 := nodes[3]
require.Equal(t, "Section2", section2.Title)
@@ -185,6 +186,7 @@
homePage := homePages[0]
require.Len(t, homePage.Data["Pages"], 9)
require.Len(t, homePage.Pages, 9) // Alias
+ require.True(t, homePage.Path() == "")
assertFileContent(t, filepath.Join("public", "index.html"), false,"Index Title: Hugo Rocks!",
@@ -280,8 +282,6 @@
// The en language has content pages
- // TODO(bep) np alias URL check
-
assertFileContent(t, filepath.Join("public", "nn", "index.html"), true,"Index Title: Hugo på norsk")
assertFileContent(t, filepath.Join("public", "en", "index.html"), true,@@ -582,7 +582,7 @@
Taxonomy Terms Title: {{ .Title }}
Taxonomy Terms Content: {{ .Content }}
{{ range $key, $value := .Data.Terms }}
- k/v: {{ $key }} / {{ printf "%=v" $value }}
+ k/v: {{ $key }} / {{ printf "%s" $value }}
{{ end }}
Date: {{ .Date.Format "2006-01-02" }}
Lastmod: {{ .Lastmod.Format "2006-01-02" }}
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -86,28 +86,36 @@
// This collection will be nil for regular pages.
Pages Pages
- Params map[string]interface{}
- Content template.HTML
- Summary template.HTML
- Aliases []string
- Status string
- Images []Image
- Videos []Video
- TableOfContents template.HTML
- Truncated bool
- Draft bool
- PublishDate time.Time
- ExpiryDate time.Time
- Markup string
- translations Pages
- extension string
- contentType string
- renderable bool
+ Params map[string]interface{}
+ Content template.HTML
+ Summary template.HTML
+ Aliases []string
+ Status string
+ Images []Image
+ Videos []Video
+
+ TableOfContents template.HTML
+
+ Truncated bool
+ Draft bool
+
+ PublishDate time.Time
+ ExpiryDate time.Time
+
+ Markup string
+
+ translations Pages
+
+ extension string
+ contentType string
+ renderable bool
+
Layout string
layoutsCalculated []string
- linkTitle string
- frontmatter []byte
+ linkTitle string
+ frontmatter []byte
+
// rawContent isn't "raw" as in the same as in the content file.
// Hugo cares about memory consumption, so we make changes to it to do
// markdown rendering etc., but it is "raw enough" so we can do rebuilds
@@ -132,9 +140,6 @@
Source
Position `json:"-"`
- // TODO(bep) np pointer, or remove
- Node
-
GitInfo *gitmap.GitInfo
// This was added as part of getting the Nodes (taxonomies etc.) to work as
@@ -151,6 +156,32 @@
// TODO(bep) np Site added to page, keep?
site *Site
+
+ // Pulled over from Node. TODO(bep) np reorg and group (embed)
+
+ Site *SiteInfo `json:"-"`
+
+ Title string
+ Description string
+ Keywords []string
+ Data map[string]interface{}
+
+ Date time.Time
+ Lastmod time.Time
+
+ Sitemap Sitemap
+
+ RSSLink template.HTML
+ URLPath
+
+ paginator *Pager
+ paginatorInit sync.Once
+
+ scratch *Scratch
+
+ language *helpers.Language
+ languageInit sync.Once
+ lang string
}
// IsNode returns whether this is an item of one of the list types in Hugo,
@@ -207,6 +238,10 @@
type Pages []*Page
+func (p Pages) String() string {
+ return fmt.Sprintf("Pages(%d)", len(p))
+}
+
func (ps Pages) FindPagePosByFilePath(inPath string) int {
for i, x := range ps {
if x.Source.Path() == inPath {
@@ -300,14 +335,6 @@
return p.Source.UniqueID()
}
-func (p *Page) Ref(ref string) (string, error) {
- return p.Node.Site.Ref(ref, p)
-}
-
-func (p *Page) RelRef(ref string) (string, error) {
- return p.Node.Site.RelRef(ref, p)
-}
-
// for logging
func (p *Page) lineNumRawContentStart() int {
return bytes.Count(p.frontmatter, []byte("\n")) + 1
@@ -450,10 +477,10 @@
var fileFn helpers.FileResolverFunc
if p.getRenderingConfig().SourceRelativeLinksEval {
fn = func(ref string) (string, error) {
- return p.Node.Site.SourceRelativeLink(ref, p)
+ return p.Site.SourceRelativeLink(ref, p)
}
fileFn = func(ref string) (string, error) {
- return p.Node.Site.SourceRelativeLinkFile(ref, p)
+ return p.Site.SourceRelativeLinkFile(ref, p)
}
}
return helpers.RenderBytes(&helpers.RenderingContext{
@@ -483,10 +510,10 @@
func newPage(filename string) *Page {
page := Page{
- Kind: kindFromFilename(filename),
- contentType: "",
- Source: Source{File: *source.NewFile(filename)},
- Node: Node{Keywords: []string{}, Sitemap: Sitemap{Priority: -1}},
+ Kind: kindFromFilename(filename),
+ contentType: "",
+ Source: Source{File: *source.NewFile(filename)},
+ Keywords: []string{},
+ Sitemap: Sitemap{Priority: -1},
Params: make(map[string]interface{}),
translations: make(Pages, 0),
sections: sectionsFromFilename(filename),
@@ -778,7 +805,7 @@
func (p *Page) Permalink() (string, error) {
// TODO(bep) np permalink
if p.IsNode() {
- return p.Node.Permalink(), nil
+ return p.Site.permalink(p.URL()), nil
}
link, err := p.permalink()
if err != nil {
@@ -788,6 +815,10 @@
}
func (p *Page) URL() string {
+ // TODO(bep) np URL
+ if p.IsNode() {
+ return p.addLangPathPrefix(p.URLPath.URL)
+ }
if p.URLPath.URL != "" {
// This is the url set in front matter
return p.URLPath.URL
@@ -1013,50 +1044,111 @@
return nil
}
-func (p *Page) HasMenuCurrent(menu string, me *MenuEntry) bool {
- // TODO(bep) np menu
- if p.IsNode() {
- return p.Node.HasMenuCurrent(menu, me)
- }
- menus := p.Menus()
+func (p *Page) HasMenuCurrent(menuID string, me *MenuEntry) bool {
+
sectionPagesMenu := helpers.Config().GetString("SectionPagesMenu")

// page is labeled as "shadow-member" of the menu with the same identifier as the section
- if sectionPagesMenu != "" && p.Section() != "" && sectionPagesMenu == menu && p.Section() == me.Identifier {
+ if sectionPagesMenu != "" && p.Section() != "" && sectionPagesMenu == menuID && p.Section() == me.Identifier {
return true
}
- if m, ok := menus[menu]; ok {
- if me.HasChildren() {
- for _, child := range me.Children {
- if child.IsEqual(m) {
- return true
- }
- if p.HasMenuCurrent(menu, child) {
- return true
- }
+ if !me.HasChildren() {
+ return false
+ }
+
+ menus := p.Menus()
+
+ if m, ok := menus[menuID]; ok {
+
+ for _, child := range me.Children {
+ if child.IsEqual(m) {
+ return true
}
+ if p.HasMenuCurrent(menuID, child) {
+ return true
+ }
}
+
}
+ if p.IsPage() {
+ return false
+ }
+
+ // The following logic is kept from back when Hugo had both Page and Node types.
+ // TODO(bep) consolidate / clean
+ nme := MenuEntry{Name: p.Title, URL: p.URL()}
+
+ for _, child := range me.Children {
+ if nme.IsSameResource(child) {
+ return true
+ }
+ if p.HasMenuCurrent(menuID, child) {
+ return true
+ }
+ }
+
return false
}
-func (p *Page) IsMenuCurrent(menu string, inme *MenuEntry) bool {
- // TODO(bep) np menu
- if p.IsNode() {
- return p.Node.IsMenuCurrent(menu, inme)
- }
+func (p *Page) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
+
menus := p.Menus()
- if me, ok := menus[menu]; ok {
- return me.IsEqual(inme)
+ if me, ok := menus[menuID]; ok {
+ if me.IsEqual(inme) {
+ return true
+ }
}
+ if p.IsPage() {
+ return false
+ }
+
+ // The following logic is kept from back when Hugo had both Page and Node types.
+ // TODO(bep) consolidate / clean
+ me := MenuEntry{Name: p.Title, URL: p.Site.createNodeMenuEntryURL(p.URL())}
+
+ if !me.IsSameResource(inme) {
+ return false
+ }
+
+ // this resource may be included in several menus
+ // search for it to make sure that it is in the menu with the given menuId
+ if menu, ok := (*p.Site.Menus)[menuID]; ok {
+ for _, menuEntry := range *menu {
+ if menuEntry.IsSameResource(inme) {
+ return true
+ }
+
+ descendantFound := p.isSameAsDescendantMenu(inme, menuEntry)
+ if descendantFound {
+ return descendantFound
+ }
+
+ }
+ }
+
return false
}
+func (p *Page) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
+ if parent.HasChildren() {
+ for _, child := range parent.Children {
+ if child.IsSameResource(inme) {
+ return true
+ }
+ descendantFound := p.isSameAsDescendantMenu(inme, child)
+ if descendantFound {
+ return descendantFound
+ }
+ }
+ }
+ return false
+}
+
func (p *Page) Menus() PageMenus {
p.pageMenusInit.Do(func() {
p.pageMenus = PageMenus{}
@@ -1253,7 +1345,6 @@
func (p *Page) TargetPath() (outfile string) {
- // TODO(bep) np
switch p.Kind {
case KindHome:
return p.addLangFilepathPrefix(helpers.FilePathSeparator)
@@ -1416,7 +1507,7 @@
// the paginators etc., we do it manually here.
// TODO(bep) np do better
func (p *Page) copy() *Page {
- c := &Page{Kind: p.Kind, Node: Node{Site: p.Site}}
+ c := &Page{Kind: p.Kind, Site: p.Site}
c.Title = p.Title
c.Data = p.Data
c.Date = p.Date
@@ -1425,4 +1516,218 @@
c.lang = p.lang
c.URLPath = p.URLPath
return c
+}
+
+// TODO(bep) np these are pulled over from Node. Needs regrouping / embed
+
+func (p *Page) Now() time.Time {
+ return time.Now()
+}
+
+func (p *Page) Hugo() *HugoInfo {
+ return hugoInfo
+}
+
+func (p *Page) RSSlink() template.HTML {
+ // TODO(bep) we cannot have two of these
+ helpers.Deprecated(".Page", "RSSlink", "RSSLink")
+ return p.RSSLink
+}
+
+func (p *Page) Ref(ref string) (string, error) {
+ return p.Site.Ref(ref, nil)
+}
+
+func (p *Page) RelRef(ref string) (string, error) {
+ return p.Site.RelRef(ref, nil)
+}
+
+func (p *Page) String() string {
+ return fmt.Sprintf("Page(%q)", p.Title)
+}
+
+type URLPath struct {
+ URL string
+ Permalink string
+ Slug string
+ Section string
+}
+
+// Scratch returns the writable context associated with this Page.
+func (p *Page) Scratch() *Scratch {
+ if p.scratch == nil {
+ p.scratch = newScratch()
+ }
+ return p.scratch
+}
+
+func (p *Page) Language() *helpers.Language {
+ p.initLanguage()
+ return p.language
+}
+
+func (p *Page) Lang() string {
+ // When set, Language can be different from lang in the case where there is a
+ // content file (doc.sv.md) with language indicator, but there is no language
+ // config for that language. Then the language will fall back on the site default.
+ if p.Language() != nil {
+ return p.Language().Lang
+ }
+ return p.lang
+}
+
+func (p *Page) isTranslation(candidate *Page) bool {
+ if p == candidate || p.Kind != candidate.Kind {
+ return false
+ }
+
+ if p.lang != candidate.lang || p.language != candidate.language {
+ return false
+ }
+
+ if p.Kind == KindPage || p.Kind == kindUnknown {
+ panic("Node type not currently supported for this op")
+ }
+
+ // At this point, we know that this is a traditional Node (home page, section, taxonomy).
+ // It represents the same node, but a different language, if the sections are the same.
+ if len(p.sections) != len(candidate.sections) {
+ return false
+ }
+
+ for i := 0; i < len(p.sections); i++ {
+ if p.sections[i] != candidate.sections[i] {
+ return false
+ }
+ }
+
+ return true
+
+}
+
+func (p *Page) shouldAddLanguagePrefix() bool {
+ if !p.Site.IsMultiLingual() {
+ return false
+ }
+
+ if p.Lang() == "" {
+ return false
+ }
+
+ if !p.Site.defaultContentLanguageInSubdir && p.Lang() == p.Site.multilingual.DefaultLang.Lang {
+ return false
+ }
+
+ return true
+}
+
+func (p *Page) initLanguage() {
+ p.languageInit.Do(func() {
+ if p.language != nil {
+ return
+ }
+ pageLang := p.lang
+ ml := p.Site.multilingual
+ if ml == nil {
+ panic("Multilanguage not set")
+ }
+ if pageLang == "" {
+ p.language = ml.DefaultLang
+ return
+ }
+
+ language := ml.Language(pageLang)
+
+ if language == nil {
+ // It can be a file named stefano.chiodino.md.
+ jww.WARN.Printf("Page language (if it is that) not found in multilang setup: %s.", pageLang)
+ language = ml.DefaultLang
+ }
+
+ p.language = language
+ })
+}
+
+func (p *Page) LanguagePrefix() string {
+ return p.Site.LanguagePrefix
+}
+
+func (p *Page) addLangPathPrefix(outfile string) string {
+ return p.addLangPathPrefixIfFlagSet(outfile, p.shouldAddLanguagePrefix())
+}
+
+func (p *Page) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
+ if helpers.IsAbsURL(outfile) {
+ return outfile
+ }
+
+ if !should {
+ return outfile
+ }
+
+ hadSlashSuffix := strings.HasSuffix(outfile, "/")
+
+ outfile = "/" + path.Join(p.Lang(), outfile)
+ if hadSlashSuffix {
+ outfile += "/"
+ }
+ return outfile
+}
+
+func (p *Page) addLangFilepathPrefix(outfile string) string {
+ if outfile == "" {
+ outfile = helpers.FilePathSeparator
+ }
+ if !p.shouldAddLanguagePrefix() {
+ return outfile
+ }
+ return helpers.FilePathSeparator + filepath.Join(p.Lang(), outfile)
+}
+
+func sectionsFromFilename(filename string) []string {
+ dir, _ := filepath.Split(filename)
+ dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
+ sections := strings.Split(dir, helpers.FilePathSeparator)
+ return sections
+}
+
+func kindFromFilename(filename string) string {
+ if !strings.Contains(filename, "_index") {
+ return KindPage
+ }
+
+ if strings.HasPrefix(filename, "_index") {
+ return KindHome
+ }
+
+ // We don't know enough yet to determine the type.
+ return kindUnknown
+}
+
+func (p *Page) setNodeTypeVars(s *Site) {
+ if p.Kind == kindUnknown {
+ // This is either a taxonomy list, taxonomy term or a section
+ nodeType := s.nodeTypeFromSections(p.sections)
+
+ if nodeType == kindUnknown {
+ panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
+ }
+
+ p.Kind = nodeType
+ }
+ // TODO(bep) np node URL
+ // Set Node URL
+ switch p.Kind {
+ case KindHome:
+ p.URLPath.URL = ""
+ case KindSection:
+ p.URLPath.URL = p.sections[0]
+ case KindTaxonomy:
+ p.URLPath.URL = path.Join(p.sections...)
+ case KindTaxonomyTerm:
+ p.URLPath.URL = path.Join(p.sections...)
+ }
+
+ p.site = s
+
}
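kindFromFilename, moved over from node.go above, classifies a source file by name alone: no "_index" means a regular page, a leading "_index" means the home page, and anything else stays unknown until setNodeTypeVars resolves it from the section path. A self-contained sketch (the kind constants' string values here are assumptions for illustration, not necessarily Hugo's actual values):

    package main

    import (
        "fmt"
        "strings"
    )

    // Illustrative kind values; the real constants live in hugolib.
    const (
        KindPage    = "page"
        KindHome    = "home"
        kindUnknown = "unknown"
    )

    func kindFromFilename(filename string) string {
        if !strings.Contains(filename, "_index") {
            return KindPage
        }
        if strings.HasPrefix(filename, "_index") {
            return KindHome
        }
        // Section, taxonomy or term: decided later from the sections slice.
        return kindUnknown
    }

    func main() {
        for _, f := range []string{"post/first.md", "_index.md", "blog/_index.md"} {
            fmt.Printf("%-16s => %s\n", f, kindFromFilename(f))
        }
    }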
--- a/hugolib/pageSort_test.go
+++ b/hugolib/pageSort_test.go
@@ -147,13 +147,12 @@
for i := 0; i < num; i++ {
pages[i] = &Page{
- Node: Node{
- URLPath: URLPath{
- Section: "z",
- URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
- },
- Site: &info,
+
+ URLPath: URLPath{
+ Section: "z",
+ URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
},
+ Site: &info,
Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
}
w := 5
--- a/hugolib/page_collections.go
+++ b/hugolib/page_collections.go
@@ -32,8 +32,7 @@
// A convenience cache for the regular pages.
// This is for the current language only.
- // TODO(bep) np consider exporting this
- regularPages Pages
+ RegularPages Pages
// Includes absolute all pages (of all types), including drafts etc.
rawAllPages Pages
@@ -41,7 +40,7 @@
func (c *PageCollections) refreshPageCaches() {
c.indexPages = c.findPagesByNodeTypeNotIn(KindPage, c.Pages)
- c.regularPages = c.findPagesByNodeTypeIn(KindPage, c.Pages)
+ c.RegularPages = c.findPagesByNodeTypeIn(KindPage, c.Pages)
// TODO(bep) np remove eventually
for _, n := range c.Pages {
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -67,13 +67,11 @@
p := &Page{Kind: KindPage,
- Node: Node{- URLPath: URLPath{- Section: "z",
- URL: test.url,
- },
- Site: &info,
+ URLPath: URLPath{+ Section: "z",
+ URL: test.url,
},
+ Site: &info,
Source: Source{File: *source.NewFile(filepath.FromSlash(test.file))},}
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -630,9 +630,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, len(pageSources))
+ require.Len(t, s.RegularPages, len(pageSources))
- assertFunc(t, e.ext, s.regularPages)
+ assertFunc(t, e.ext, s.RegularPages)
}
@@ -740,9 +740,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, 1)
+ require.Len(t, s.RegularPages, 1)
- p := s.regularPages[0]
+ p := s.RegularPages[0]
if p.Summary != template.HTML("<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a rel=\"footnote\" href=\"#fn:1\">1</a></sup>\n</p>") {
t.Fatalf("Got summary:\n%q", p.Summary)
@@ -788,9 +788,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, 1)
+ require.Len(t, s.RegularPages, 1)
- p := s.regularPages[0]
+ p := s.RegularPages[0]
checkPageContent(t, p, "<p>first line.<br />\nsecond line.</p>\n\n<p>fourth line.</p>\n")
}
@@ -802,9 +802,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, 1)
+ require.Len(t, s.RegularPages, 1)
- p := s.regularPages[0]
+ p := s.RegularPages[0]
checkPageContent(t, p, "\n\n<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p>\n\n<h2 id=\"aa\">AA</h2>\n\n<p>I have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.</p>\n\n<h3 id=\"aaa\">AAA</h3>\n\n<p>I remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath–as swift as the passage of light–would leap after me from the pit about\nthe cylinder and strike me down. ## BB</p>\n\n<h3 id=\"bbb\">BBB</h3>\n\n<p>“You’re a great Granser,” he cried delightedly, “always making believe them little marks mean something.”</p>\n")
checkPageTOC(t, p, "<nav id=\"TableOfContents\">\n<ul>\n<li>\n<ul>\n<li><a href=\"#aa\">AA</a>\n<ul>\n<li><a href=\"#aaa\">AAA</a></li>\n<li><a href=\"#bbb\">BBB</a></li>\n</ul></li>\n</ul></li>\n</ul>\n</nav>")
@@ -832,9 +832,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, 1)
+ require.Len(t, s.RegularPages, 1)
- p := s.regularPages[0]
+ p := s.RegularPages[0]
d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z")
checkPageDate(t, p, d)
@@ -1145,10 +1145,10 @@
for _, test := range tests {
p, _ := NewPageFrom(strings.NewReader(test.content), filepath.FromSlash(test.path))
info := newSiteInfo(siteBuilderCfg{language: helpers.NewDefaultLanguage()})
- p.Node.Site = &info
+ p.Site = &info
if test.hasPermalink {
- p.Node.Site.Permalinks = siteParmalinksSetting
+ p.Site.Permalinks = siteParmalinksSetting
}
expectedTargetPath := filepath.FromSlash(test.expected)
@@ -1263,7 +1263,7 @@
}{
{func(n *Page) bool { return n.IsNode() }},
{func(n *Page) bool { return !n.IsPage() }},
- {func(n *Page) bool { return n.RSSlink() == "rssLink" }},
+ {func(n *Page) bool { return n.RSSLink == "rssLink" }},
{func(n *Page) bool { return n.Scratch() != nil }},
{func(n *Page) bool { return n.Hugo() != nil }},
{func(n *Page) bool { return n.Now().Unix() == time.Now().Unix() }},
@@ -1298,9 +1298,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, 1)
+ require.Len(t, s.RegularPages, 1)
- p := s.regularPages[0]
+ p := s.RegularPages[0]
checkPageTitle(t, p, "Simple")
}
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -33,6 +33,10 @@
*paginator
}
+func (p Pager) String() string {
+ return fmt.Sprintf("Pager %d", p.number)
+}
+
type paginatedElement interface {
Len() int
}
@@ -257,11 +261,11 @@
return split
}
-// Paginator gets this Node's paginator if it's already created.
+// Paginator gets this Page's paginator if it's already created.
// If it's not, one will be created with all pages in Data["Pages"].
-func (n *Page) Paginator(options ...interface{}) (*Pager, error) {
- if !n.IsNode() {
- return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.Kind, n.Title)
+func (p *Page) Paginator(options ...interface{}) (*Pager, error) {
+ if !p.IsNode() {
+ return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title)
}
pagerSize, err := resolvePagerSize(options...)
@@ -271,12 +275,12 @@
var initError error
- n.paginatorInit.Do(func() {
- if n.paginator != nil {
+ p.paginatorInit.Do(func() {
+ if p.paginator != nil {
return
}
- pagers, err := paginatePages(n.Data["Pages"], pagerSize, n.URL())
+ pagers, err := paginatePages(p.Data["Pages"], pagerSize, p.URL())
if err != nil {initError = err
@@ -284,10 +288,10 @@
if len(pagers) > 0 {
// the rest of the nodes will be created later
- n.paginator = pagers[0]
- n.paginator.source = "paginator"
- n.paginator.options = options
- n.Site.addToPaginationPageCount(uint64(n.paginator.TotalPages()))
+ p.paginator = pagers[0]
+ p.paginator.source = "paginator"
+ p.paginator.options = options
+ p.Site.addToPaginationPageCount(uint64(p.paginator.TotalPages()))
}
})
@@ -296,7 +300,7 @@
return nil, initError
}
- return n.paginator, nil
+ return p.paginator, nil
}
// Paginate gets this Node's paginator if it's already created.
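The renamed receiver aside, Paginator keeps its lazy, race-safe initialization: the pagers are built once under paginatorInit, and every later call returns the cached first pager. A stripped-down sketch of that pattern, with the pager construction reduced to a placeholder:

    package main

    import (
        "fmt"
        "sync"
    )

    type pager struct{ number int }

    type page struct {
        paginator     *pager
        paginatorInit sync.Once
    }

    // Paginator builds the pager exactly once, however many templates
    // ask for it concurrently.
    func (p *page) Paginator() *pager {
        p.paginatorInit.Do(func() {
            p.paginator = &pager{number: 1} // placeholder for paginatePages(...)
        })
        return p.paginator
    }

    func main() {
        p := &page{}
        fmt.Println(p.Paginator().number)           // 1
        fmt.Println(p.Paginator() == p.Paginator()) // true: cached
    }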
--- a/hugolib/pagination_test.go
+++ b/hugolib/pagination_test.go
@@ -457,13 +457,11 @@
info := newSiteInfo(siteBuilderCfg{baseURL: "http://base/", language: helpers.NewDefaultLanguage()})
for i := 0; i < num; i++ {
pages[i] = &Page{
- Node: Node{
- URLPath: URLPath{
- Section: "z",
- URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
- },
- Site: &info,
+ URLPath: URLPath{
+ Section: "z",
+ URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
},
+ Site: &info,
Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
}
w := 5
--- a/hugolib/shortcode_test.go
+++ b/hugolib/shortcode_test.go
@@ -69,9 +69,9 @@
t.Fatalf("No error from shortcode")}
- require.Len(t, h.Sites[0].regularPages, 1)
+ require.Len(t, h.Sites[0].RegularPages, 1)
- output := strings.TrimSpace(string(h.Sites[0].regularPages[0].Content))
+ output := strings.TrimSpace(string(h.Sites[0].RegularPages[0].Content))
if strings.HasPrefix(output, "<p>") {output = output[3:]
}
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -193,6 +193,10 @@
pathSpec *helpers.PathSpec
}
+func (s *SiteInfo) String() string {
+ return fmt.Sprintf("Site(%q)", s.Title)
+}
+
// Used in tests.
type siteBuilderCfg struct {
--- a/hugolib/siteJSONEncode_test.go
+++ b/hugolib/siteJSONEncode_test.go
@@ -30,7 +30,7 @@
_, err := json.Marshal(s)
check(t, err)
- _, err = json.Marshal(s.regularPages[0])
+ _, err = json.Marshal(s.RegularPages[0])
check(t, err)
}
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -99,7 +99,8 @@
aliasPath := p.addLangPathPrefix(helpers.PaginateAliasPath(path.Join(p.sections...), 1))
//TODO(bep) np node.permalink
- s.writeDestAlias(aliasPath, p.Node.Permalink(), nil)
+ link, _ := p.Permalink()
+ s.writeDestAlias(aliasPath, link, nil)
pagers := p.paginator.Pagers()
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -91,9 +91,9 @@
t.Fatalf("Failed to build site: %s", err)}
- require.Len(t, s.regularPages, 1)
+ require.Len(t, s.RegularPages, 1)
- p := s.regularPages[0]
+ p := s.RegularPages[0]
err := s.renderThing(p, "foobar", nil)
if err == nil {
@@ -142,7 +142,7 @@
// Testing Defaults.. Only draft:true and publishDate in the past should be rendered
s := siteSetup(t)
- if len(s.regularPages) != 1 {
+ if len(s.RegularPages) != 1 {
t.Fatal("Draft or Future dated content published unexpectedly")
}
@@ -149,7 +149,7 @@
// only publishDate in the past should be rendered
viper.Set("buildDrafts", true)s = siteSetup(t)
- if len(s.regularPages) != 2 {
+ if len(s.RegularPages) != 2 {
t.Fatal("Future Dated Posts published unexpectedly")
}
@@ -157,7 +157,7 @@
viper.Set("buildDrafts", false) viper.Set("buildFuture", true)s = siteSetup(t)
- if len(s.regularPages) != 2 {+ if len(s.RegularPages) != 2 { t.Fatal("Draft posts published unexpectedly")}
@@ -165,7 +165,7 @@
viper.Set("buildDrafts", true) viper.Set("buildFuture", true)s = siteSetup(t)
- if len(s.regularPages) != 4 {+ if len(s.RegularPages) != 4 { t.Fatal("Drafts or Future posts not included as expected")}
@@ -201,11 +201,11 @@
s := siteSetup(t)
if len(s.AllPages) != 1 {
- if len(s.regularPages) > 1 {
+ if len(s.RegularPages) > 1 {
t.Fatal("Expired content published unexpectedly")
}
- if len(s.regularPages) < 1 {
+ if len(s.RegularPages) < 1 {
t.Fatal("Valid content expired unexpectedly")
}
}
@@ -285,7 +285,7 @@
t.Fatalf("Failed to build site: %s", err)}
- if len(s.regularPages) != 3 {+ if len(s.RegularPages) != 3 { t.Fatalf("Expected 3 got %d pages", len(s.AllPages))}
@@ -377,7 +377,7 @@
{filepath.FromSlash("public/ugly.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>doc2 <em>content</em></p>\n"},}
- for _, p := range s.regularPages {+ for _, p := range s.RegularPages {assert.False(t, p.IsHome())
}
@@ -649,7 +649,7 @@
t.Errorf("Pages in unexpected order. Second should be '%s', got '%s'", "Three", s.Sections["sect"][1].Page.Title)}
- bydate := s.regularPages.ByDate()
+ bydate := s.RegularPages.ByDate()
if bydate[0].Title != "One" {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title)
@@ -660,7 +660,7 @@
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title)
}
- bypubdate := s.regularPages.ByPublishDate()
+ bypubdate := s.RegularPages.ByPublishDate()
if bypubdate[0].Title != "One" {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title)
@@ -671,7 +671,7 @@
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title)
}
- bylength := s.regularPages.ByLength()
+ bylength := s.RegularPages.ByLength()
if bylength[0].Title != "One" {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title)
}
@@ -710,7 +710,7 @@
t.Fatalf("Failed to build site: %s", err)}
- rbysection, err := s.regularPages.GroupBy("Section", "desc")+ rbysection, err := s.RegularPages.GroupBy("Section", "desc") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
@@ -730,7 +730,7 @@
t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages))}
- bytype, err := s.regularPages.GroupBy("Type", "asc")+ bytype, err := s.RegularPages.GroupBy("Type", "asc") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
@@ -750,7 +750,7 @@
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages))}
- bydate, err := s.regularPages.GroupByDate("2006-01", "asc")+ bydate, err := s.RegularPages.GroupByDate("2006-01", "asc") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
@@ -770,7 +770,7 @@
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bydate[2].Pages))}
- bypubdate, err := s.regularPages.GroupByPublishDate("2006")+ bypubdate, err := s.RegularPages.GroupByPublishDate("2006") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
@@ -787,7 +787,7 @@
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages))}
- byparam, err := s.regularPages.GroupByParam("my_param", "desc")+ byparam, err := s.RegularPages.GroupByParam("my_param", "desc") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
@@ -807,12 +807,12 @@
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages))}
- _, err = s.regularPages.GroupByParam("not_exist")+ _, err = s.RegularPages.GroupByParam("not_exist") if err == nil { t.Errorf("GroupByParam didn't return an expected error")}
- byOnlyOneParam, err := s.regularPages.GroupByParam("only_one")+ byOnlyOneParam, err := s.RegularPages.GroupByParam("only_one") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
@@ -823,7 +823,7 @@
t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "yes", byOnlyOneParam[0].Key)}
- byParamDate, err := s.regularPages.GroupByParamDate("my_date", "2006-01")+ byParamDate, err := s.RegularPages.GroupByParamDate("my_date", "2006-01") if err != nil { t.Fatalf("Unable to make PageGroup array: %s", err)}
--- a/hugolib/taxonomy.go
+++ b/hugolib/taxonomy.go
@@ -14,6 +14,7 @@
package hugolib
import (
+ "fmt"
"sort"
"github.com/spf13/hugo/helpers"
@@ -23,6 +24,10 @@
// e.g. List['tags'] => TagTaxonomy (from above)
type TaxonomyList map[string]Taxonomy
+func (tl TaxonomyList) String() string {
+ return fmt.Sprintf("TaxonomyList(%d)", len(tl))
+}
+
// A Taxonomy is a map of keywords to a list of pages.
// For example
// TagTaxonomy['technology'] = WeightedPages
@@ -37,6 +42,10 @@
type WeightedPage struct {
Weight int
Page *Page
+}
+
+func (w WeightedPage) String() string {
+ return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title)
+}
// OrderedTaxonomy is another representation of a Taxonomy using an array rather than a map.
--
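A closing note on the String() methods sprinkled through this commit (Pages, Pager, SiteInfo, TaxonomyList, WeightedPage): implementing fmt.Stringer gives the %v and %s verbs a compact, stable summary to print instead of dumping the whole struct, which is mainly a debugging aid. A quick illustration with a stand-in type:

    package main

    import "fmt"

    type WeightedPage struct {
        Weight int
        Title  string
    }

    // String makes fmt print a short summary instead of the raw struct.
    func (w WeightedPage) String() string {
        return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Title)
    }

    func main() {
        fmt.Println(WeightedPage{Weight: 10, Title: "First"})
        // prints: WeightedPage(10,"First")
    }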