ref: 5dfc1dedb8ac53b7a2d3823d06808ae86f90b3d9
parent: 24bbfe7d32fe151487ff87394433622e3f69eee4
author: spf13 <steve.francia@gmail.com>
date: Thu Oct 16 16:20:09 EDT 2014
Big refactor of how source files are used. Also added default destination extension option.
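The refactor replaces the flat File struct that lived on Page (Name, FileName, Extension, Dir and UniqueId as plain fields) with the new source.File type, whose properties are derived lazily from a single relative path. A rough sketch of the resulting accessors, illustrative only (example path and values assumed, not taken from the patch):

    // Hypothetical usage of the new source.File API introduced below.
    f := source.NewFile("content/blue/doc2.md")
    f.LogicalName()  // "doc2.md"
    f.BaseFileName() // "doc2"
    f.Dir()          // "content/blue/"
    f.Section()      // "blue"
    f.Ext()          // "md"

Pages embed this as Page.Source, so code that used to read p.FileName, p.Dir or p.Section now goes through accessors. The destination extension is also no longer hard-coded to "html": it can be set per page via the new extension/ext front matter key, falling back to the new DefaultExtension config value.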
--- a/commands/convert.go
+++ b/commands/convert.go
@@ -99,8 +99,8 @@
jww.FEEDBACK.Println("processing", len(site.Source.Files()), "content files") for _, file := range site.Source.Files() {- jww.INFO.Println("Attempting to convert", file.LogicalName)- page, err := hugolib.NewPage(file.LogicalName)
+ jww.INFO.Println("Attempting to convert", file.LogicalName())
+ page, err := hugolib.NewPage(file.LogicalName())
if err != nil {
return err
}
@@ -107,12 +107,12 @@
psr, err := parser.ReadFrom(file.Contents)
if err != nil {
- jww.ERROR.Println("Error processing file:", path.Join(file.Dir, file.LogicalName))
+ jww.ERROR.Println("Error processing file:", file.Path())
return err
}
metadata, err := psr.Metadata()
if err != nil {
- jww.ERROR.Println("Error processing file:", path.Join(file.Dir, file.LogicalName))
+ jww.ERROR.Println("Error processing file:", file.Path())
return err
}
@@ -128,7 +128,7 @@
metadata = newmetadata
}
- page.Dir = file.Dir
+ //page.Dir = file.Dir
page.SetSourceContent(psr.Content())
page.SetSourceMetaData(metadata, mark)
--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -118,6 +118,7 @@
viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0)) viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1}) viper.SetDefault("PygmentsStyle", "monokai")+ viper.SetDefault("DefaultExtension", "html") viper.SetDefault("PygmentsUseClasses", false) viper.SetDefault("DisableLiveReload", false) viper.SetDefault("PluralizeListTitles", true)--- a/create/content.go
+++ b/create/content.go
@@ -94,7 +94,7 @@
newmetadata["date"] = time.Now().Format(time.RFC3339)
}
- page.Dir = viper.GetString("sourceDir")
+ //page.Dir = viper.GetString("sourceDir")
page.SetSourceMetaData(newmetadata, parser.FormatToLeadRune(viper.GetString("MetaDataFormat")))
page.SetSourceContent(psr.Content())
if err = page.SafeSaveSourceAs(path.Join(viper.GetString("contentDir"), name)); err != nil {
--- a/docs/layouts/partials/menu.html
+++ b/docs/layouts/partials/menu.html
@@ -31,7 +31,7 @@
{{end}}
<li> <a href="https://github.com/spf13/hugo/issues" target="blank"><i class='fa fa-life-ring'></i>Issues & Help</a> </li>
{{ if .IsPage }}
- {{ $File := .File }} {{with $File.FileName }}<li><a href="https://github.com/spf13/hugo/edit/master/docs/content/{{ $File.Dir }}{{ $File.FileName }}" target="blank"><i class='fa fa-edit'></i> Refine this Page</a> </li>{{end}}
+ {{ $File := .File }} {{with $File.Path }}<li><a href="https://github.com/spf13/hugo/edit/master/docs/content/{{ $File.Dir }}{{ $File.LogicalName }}" target="blank"><i class='fa fa-edit'></i> Refine this Page</a> </li>{{end}}
{{ end }}
</ul>
<!-- sidebar menu end-->
--- /dev/null
+++ b/helpers/content.go
@@ -0,0 +1,231 @@
+// Copyright © 2014 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "bytes"
+ "html/template"
+ "os/exec"
+
+ "github.com/russross/blackfriday"
+ "github.com/spf13/viper"
+
+ jww "github.com/spf13/jwalterweatherman"
+
+ "strings"
+)
+
+var SummaryLength = 70
+var SummaryDivider = []byte("<!--more-->")
+
+func StripHTML(s string) string {
+ output := ""
+
+ // Shortcut strings with no tags in them
+ if !strings.ContainsAny(s, "<>") {
+ output = s
+ } else {
+ s = strings.Replace(s, "\n", " ", -1)
+ s = strings.Replace(s, "</p>", " \n", -1)
+ s = strings.Replace(s, "<br>", " \n", -1)
+ s = strings.Replace(s, "</br>", " \n", -1)
+
+ // Walk through the string removing all tags
+ b := new(bytes.Buffer)
+ inTag := false
+ for _, r := range s {
+ switch r {
+ case '<':
+ inTag = true
+ case '>':
+ inTag = false
+ default:
+ if !inTag {
+ b.WriteRune(r)
+ }
+ }
+ }
+ output = b.String()
+ }
+ return output
+}
+
+func StripEmptyNav(in []byte) []byte {
+ return bytes.Replace(in, []byte("<nav>\n</nav>\n\n"), []byte(``), -1)
+}
+
+func BytesToHTML(b []byte) template.HTML {
+ return template.HTML(string(b))
+}
+
+func GetHtmlRenderer(defaultFlags int, footnoteref string) blackfriday.Renderer {
+ renderParameters := blackfriday.HtmlRendererParameters{
+ FootnoteAnchorPrefix: viper.GetString("FootnoteAnchorPrefix"),
+ FootnoteReturnLinkContents: viper.GetString("FootnoteReturnLinkContents"),
+ }
+
+ if len(footnoteref) != 0 {
+ renderParameters.FootnoteAnchorPrefix = footnoteref + ":" +
+ renderParameters.FootnoteAnchorPrefix
+ }
+
+ htmlFlags := defaultFlags
+ htmlFlags |= blackfriday.HTML_USE_XHTML
+ htmlFlags |= blackfriday.HTML_USE_SMARTYPANTS
+ htmlFlags |= blackfriday.HTML_SMARTYPANTS_FRACTIONS
+ htmlFlags |= blackfriday.HTML_SMARTYPANTS_LATEX_DASHES
+ htmlFlags |= blackfriday.HTML_FOOTNOTE_RETURN_LINKS
+
+ return blackfriday.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters)
+}
+
+func GetMarkdownExtensions() int {
+ return 0 | blackfriday.EXTENSION_NO_INTRA_EMPHASIS |
+ blackfriday.EXTENSION_TABLES | blackfriday.EXTENSION_FENCED_CODE |
+ blackfriday.EXTENSION_AUTOLINK | blackfriday.EXTENSION_STRIKETHROUGH |
+ blackfriday.EXTENSION_SPACE_HEADERS | blackfriday.EXTENSION_FOOTNOTES |
+ blackfriday.EXTENSION_HEADER_IDS
+}
+
+func MarkdownRender(content []byte, footnoteref string) []byte {
+ return blackfriday.Markdown(content, GetHtmlRenderer(0, footnoteref),
+ GetMarkdownExtensions())
+}
+
+func MarkdownRenderWithTOC(content []byte, footnoteref string) []byte {
+ return blackfriday.Markdown(content,
+ GetHtmlRenderer(blackfriday.HTML_TOC, footnoteref),
+ GetMarkdownExtensions())
+}
+
+func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
+ origContent := make([]byte, len(content))
+ copy(origContent, content)
+ first := []byte(`<nav>
+<ul>`)
+
+ last := []byte(`</ul>
+</nav>`)
+
+ replacement := []byte(`<nav id="TableOfContents">
+<ul>`)
+
+ startOfTOC := bytes.Index(content, first)
+
+ peekEnd := len(content)
+ if peekEnd > 70+startOfTOC {
+ peekEnd = 70 + startOfTOC
+ }
+
+ if startOfTOC < 0 {
+ return StripEmptyNav(content), toc
+ }
+ // Need to peek ahead to see if this nav element is actually the right one.
+ correctNav := bytes.Index(content[startOfTOC:peekEnd], []byte(`#toc_0`))
+ if correctNav < 0 { // no match found
+ return content, toc
+ }
+ lengthOfTOC := bytes.Index(content[startOfTOC:], last) + len(last)
+ endOfTOC := startOfTOC + lengthOfTOC
+
+ newcontent = append(content[:startOfTOC], content[endOfTOC:]...)
+ toc = append(replacement, origContent[startOfTOC+len(first):endOfTOC]...)
+ return
+}
+
+func RenderBytesWithTOC(content []byte, pagefmt string, footnoteref string) []byte {
+ switch pagefmt {
+ default:
+ return MarkdownRenderWithTOC(content, footnoteref)
+ case "markdown":
+ return MarkdownRenderWithTOC(content, footnoteref)
+ case "rst":
+ return []byte(GetRstContent(content))
+ }
+}
+
+func RenderBytes(content []byte, pagefmt string, footnoteref string) []byte {
+ switch pagefmt {
+ default:
+ return MarkdownRender(content, footnoteref)
+ case "markdown":
+ return MarkdownRender(content, footnoteref)
+ case "rst":
+ return []byte(GetRstContent(content))
+ }
+}
+
+func TotalWords(s string) int {
+ return len(strings.Fields(s))
+}
+
+func WordCount(s string) map[string]int {
+ m := make(map[string]int)
+ for _, f := range strings.Fields(s) {
+ m[f] += 1
+ }
+
+ return m
+}
+
+func RemoveSummaryDivider(content []byte) []byte {
+ return bytes.Replace(content, SummaryDivider, []byte(""), -1)
+}
+
+func TruncateWords(s string, max int) string {
+ words := strings.Fields(s)
+ if max > len(words) {
+ return strings.Join(words, " ")
+ }
+
+ return strings.Join(words[:max], " ")
+}
+
+func TruncateWordsToWholeSentence(s string, max int) string {
+ words := strings.Fields(s)
+ if max > len(words) {
+ return strings.Join(words, " ")
+ }
+
+ for counter, word := range words[max:] {
+ if strings.HasSuffix(word, ".") ||
+ strings.HasSuffix(word, "?") ||
+ strings.HasSuffix(word, ".\"") ||
+ strings.HasSuffix(word, "!") {+ return strings.Join(words[:max+counter+1], " ")
+ }
+ }
+
+ return strings.Join(words[:max], " ")
+}
+
+func GetRstContent(content []byte) string {
+ cleanContent := bytes.Replace(content, SummaryDivider, []byte(""), 1)
+
+ cmd := exec.Command("rst2html.py", "--leave-comments")
+ cmd.Stdin = bytes.NewReader(cleanContent)
+ var out bytes.Buffer
+ cmd.Stdout = &out
+ if err := cmd.Run(); err != nil {
+ jww.ERROR.Println(err)
+ }
+
+ rstLines := strings.Split(out.String(), "\n")
+ for i, line := range rstLines {
+ if strings.HasPrefix(line, "<body>") {
+ rstLines = (rstLines[i+1 : len(rstLines)-3])
+ }
+ }
+ return strings.Join(rstLines, "\n")
+}
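The new helpers/content.go gathers the rendering and summary helpers that previously lived in hugolib/summary.go and hugolib/page.go, now exported. A minimal sketch of how they compose into a plain-text summary, assuming markdown input (illustrative, not part of the patch):

    // Render, strip tags, then truncate on a sentence boundary.
    rendered := helpers.RenderBytes([]byte("Some *markdown* content."), "markdown", "")
    plain := helpers.StripHTML(string(rendered))
    summary := helpers.TruncateWordsToWholeSentence(plain, helpers.SummaryLength)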
--- a/helpers/general.go
+++ b/helpers/general.go
@@ -15,43 +15,14 @@
import (
"bytes"
+ "crypto/md5"
+ "encoding/hex"
"fmt"
+ "io"
"net"
"strings"
)
-func StripHTML(s string) string {
- output := ""
-
- // Shortcut strings with no tags in them
- if !strings.ContainsAny(s, "<>") {
- output = s
- } else {
- s = strings.Replace(s, "\n", " ", -1)
- s = strings.Replace(s, "</p>", " \n", -1)
- s = strings.Replace(s, "<br>", " \n", -1)
- s = strings.Replace(s, "</br>", " \n", -1)
-
- // Walk through the string removing all tags
- b := new(bytes.Buffer)
- inTag := false
- for _, r := range s {
- switch r {
- case '<':
- inTag = true
- case '>':
- inTag = false
- default:
- if !inTag {
- b.WriteRune(r)
- }
- }
- }
- output = b.String()
- }
- return output
-}
-
func FindAvailablePort() (*net.TCPAddr, error) {
l, err := net.Listen("tcp", ":0")
if err == nil {
@@ -63,4 +34,43 @@
return nil, fmt.Errorf("Unable to obtain a valid tcp port. %v", addr)}
return nil, err
+}
+
+func GuessType(in string) string {
+ switch strings.ToLower(in) {
+ case "md", "markdown", "mdown":
+ return "markdown"
+ case "rst":
+ return "rst"
+ case "html", "htm":
+ return "html"
+ }
+
+ return "unknown"
+}
+
+func ReaderToBytes(lines io.Reader) []byte {
+ b := new(bytes.Buffer)
+ b.ReadFrom(lines)
+ return b.Bytes()
+}
+
+// SliceToLower goes through the source slice and lowers all values.
+func SliceToLower(s []string) []string {
+ if s == nil {
+ return nil
+ }
+
+ l := make([]string, len(s))
+ for i, v := range s {
+ l[i] = strings.ToLower(v)
+ }
+
+ return l
+}
+
+func Md5String(f string) string {
+ h := md5.New()
+ h.Write([]byte(f))
+ return hex.EncodeToString(h.Sum([]byte{}))
+}
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -166,16 +166,52 @@
return
}
+func GetRelativePath(path, base string) (final string, err error) {
+ if filepath.IsAbs(path) && base == "" {
+ return "", errors.New("source: missing base directory")
+ }
+ name := filepath.Clean(path)
+ base = filepath.Clean(base)
+
+ name, err = filepath.Rel(base, name)
+ if err != nil {+ return "", err
+ }
+ name = filepath.ToSlash(name)
+ return name, nil
+}
+
+// Given a source path, determine the section
func GuessSection(in string) string {
- x := strings.Split(in, "/")
- x = x[:len(x)-1]
- if len(x) == 0 {
+ parts := strings.Split(in, "/")
+
+ if len(parts) == 0 {
return ""
}
- if x[0] == "content" {- x = x[1:]
+
+ // trim filename
+ if !strings.HasSuffix(in, "/") {
+ parts = parts[:len(parts)-1]
}
- return path.Join(x...)
+
+ if len(parts) == 0 {
+ return ""
+ }
+
+ // if first directory is "content", return second directory
+ section := ""
+
+ if parts[0] == "content" && len(parts) > 1 {+ section = parts[1]
+ } else {
+ section = parts[0]
+ }
+
+ if section == "." {+ return ""
+ }
+
+ return section
}
func PathPrep(ugly bool, in string) string {
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -15,8 +15,6 @@
import (
"bytes"
- "crypto/md5"
- "encoding/hex"
"errors"
"fmt"
"html/template"
@@ -23,48 +21,57 @@
"io"
"net/url"
"path"
- "path/filepath"
"strings"
"time"
- "github.com/russross/blackfriday"
"github.com/spf13/cast"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/parser"
+ "github.com/spf13/hugo/source"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
)
type Page struct {
- Status string
- Images []string
- rawContent []byte
- Content template.HTML
- Summary template.HTML
- TableOfContents template.HTML
- Truncated bool
- plain string // TODO should be []byte
- Params map[string]interface{}
- contentType string
- Draft bool
- PublishDate time.Time
- Aliases []string
- Tmpl Template
- Markup string
- renderable bool
- layout string
- linkTitle string
- frontmatter []byte
- sourceFrontmatter []byte
- sourceContent []byte
+ Params map[string]interface{}
+ Content template.HTML
+ Summary template.HTML
+ Aliases []string
+ Status string
+ Images []string
+ TableOfContents template.HTML
+ Truncated bool
+ Draft bool
+ PublishDate time.Time
+ Tmpl Template
+ Markup string
+
+ extension string
+ contentType string
+ renderable bool
+ layout string
+ linkTitle string
+ frontmatter []byte
+ rawContent []byte
+ plain string // TODO should be []byte
+ //sourceFrontmatter []byte
+ //sourceContent []byte
PageMeta
- File
+ //SourceFile source.File
+ Source
Position
Node
+ //Destination source.File
}
-type File struct {
- Name, FileName, Extension, Dir, UniqueId string
+//type File struct {
+//Name, FileName, Extension, Dir, UniqueId string
+//}
+
+type Source struct {
+ Frontmatter []byte
+ Content []byte
+ source.File
}
type PageMeta struct {
@@ -97,75 +104,40 @@
}
func (p *Page) UniqueId() string {
- return p.File.UniqueId
+ return p.Source.UniqueId()
}
func (p *Page) setSummary() {
- if bytes.Contains(p.rawContent, summaryDivider) {
+ if bytes.Contains(p.rawContent, helpers.SummaryDivider) {
// If user defines split:
// Split then render
p.Truncated = true // by definition
- header := bytes.Split(p.rawContent, summaryDivider)[0]
- p.Summary = bytesToHTML(p.renderBytes(header))
+ header := bytes.Split(p.rawContent, helpers.SummaryDivider)[0]
+ p.Summary = helpers.BytesToHTML(p.renderBytes(header))
} else {
// If hugo defines split:
// render, strip html, then split
plain := strings.TrimSpace(p.Plain())
- p.Summary = bytesToHTML([]byte(TruncateWordsToWholeSentence(plain, summaryLength)))
+ p.Summary = helpers.BytesToHTML([]byte(helpers.TruncateWordsToWholeSentence(plain, helpers.SummaryLength)))
p.Truncated = len(p.Summary) != len(plain)
}
}
-func stripEmptyNav(in []byte) []byte {
- return bytes.Replace(in, []byte("<nav>\n</nav>\n\n"), []byte(``), -1)
-}
-
-func bytesToHTML(b []byte) template.HTML {
- return template.HTML(string(b))
-}
-
func (p *Page) renderBytes(content []byte) []byte {
- return renderBytes(content, p.guessMarkupType(), p.UniqueId())
+ return helpers.RenderBytes(content, p.guessMarkupType(), p.UniqueId())
}
func (p *Page) renderContent(content []byte) []byte {
- return renderBytesWithTOC(content, p.guessMarkupType(), p.UniqueId())
+ return helpers.RenderBytesWithTOC(content, p.guessMarkupType(), p.UniqueId())
}
-func renderBytesWithTOC(content []byte, pagefmt string, footnoteref string) []byte {
- switch pagefmt {
- default:
- return markdownRenderWithTOC(content, footnoteref)
- case "markdown":
- return markdownRenderWithTOC(content, footnoteref)
- case "rst":
- return []byte(getRstContent(content))
- }
-}
-
-func renderBytes(content []byte, pagefmt string, footnoteref string) []byte {
- switch pagefmt {
- default:
- return markdownRender(content, footnoteref)
- case "markdown":
- return markdownRender(content, footnoteref)
- case "rst":
- return []byte(getRstContent(content))
- }
-}
-
func newPage(filename string) *Page {
- name := filepath.Base(filename)
- // strip off the extension
- name = name[:len(name)-len(filepath.Ext(name))]
-
page := Page{contentType: "",
- File: File{Name: name, FileName: filename, Extension: "html", UniqueId: md5ForFilename(filename)},
+ Source: Source{File: *source.NewFile(filename)},
Node: Node{Keywords: []string{}, Sitemap: Sitemap{Priority: -1}},
Params: make(map[string]interface{})}
- jww.DEBUG.Println("Reading from", page.File.FileName)
- page.guessSection()
+ jww.DEBUG.Println("Reading from", page.File.Path())return &page
}
@@ -173,18 +145,12 @@
return p.renderable
}
-func (p *Page) guessSection() {
- if p.Section == "" {
- p.Section = helpers.GuessSection(p.FileName)
- }
-}
-
func (page *Page) Type() string {
if page.contentType != "" {
return page.contentType
}
- page.guessSection()
- if x := page.Section; x != "" {
+
+ if x := page.Section(); x != "" {
return x
}
@@ -191,6 +157,10 @@
return "page"
}
+func (page *Page) Section() string {
+ return page.Source.Section()
+}
+
func (page *Page) Layout(l ...string) []string {
if page.layout != "" {
return layouts(page.Type(), page.layout)
@@ -261,7 +231,7 @@
}
func (p *Page) analyzePage() {
- p.WordCount = TotalWords(p.Plain())
+ p.WordCount = helpers.TotalWords(p.Plain())
p.FuzzyWordCount = int((p.WordCount+100)/100) * 100
p.ReadingTime = int((p.WordCount + 212) / 213)
}
@@ -268,7 +238,7 @@
func (p *Page) permalink() (*url.URL, error) {
baseUrl := string(p.Site.BaseUrl)
- dir := strings.TrimSpace(p.Dir)
+ dir := strings.TrimSpace(p.Source.Dir())
pSlug := strings.TrimSpace(p.Slug)
pUrl := strings.TrimSpace(p.Url)
var permalink string
@@ -278,7 +248,7 @@
return helpers.MakePermalink(baseUrl, pUrl), nil
}
- if override, ok := p.Site.Permalinks[p.Section]; ok {
+ if override, ok := p.Site.Permalinks[p.Section()]; ok {
permalink, err = override.Expand(p)
if err != nil {
@@ -287,10 +257,10 @@
// fmt.Printf("have a section override for %q in section %s → %s\n", p.Title, p.Section, permalink) } else { if len(pSlug) > 0 {- permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, p.Slug+"."+p.Extension))+ permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, p.Slug+"."+p.Extension())) } else {- _, t := path.Split(p.FileName)
- permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension)))
+ _, t := path.Split(p.Source.LogicalName())
+ permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension())))
}
}
@@ -297,6 +267,14 @@
return helpers.MakePermalink(baseUrl, permalink), nil
}
+func (p *Page) Extension() string {
+ if p.extension != "" {
+ return p.extension
+ } else {
+ return viper.GetString("DefaultExtension")
+ }
+}
+
func (p *Page) LinkTitle() string {
if len(p.linkTitle) > 0 {
return p.linkTitle
@@ -370,6 +348,8 @@
page.Url = helpers.Urlize(cast.ToString(v))
case "type":
page.contentType = cast.ToString(v)
+ case "extension", "ext":
+ page.extension = cast.ToString(v)
case "keywords":
page.Keywords = cast.ToStringSlice(v)
case "date":
@@ -445,7 +425,7 @@
case time.Time:
return cast.ToTime(v)
case []string:
- return sliceToLower(v.([]string))
+ return helpers.SliceToLower(v.([]string))
}
return nil
}
@@ -543,33 +523,15 @@
func (page *Page) guessMarkupType() string {
// First try the explicitly set markup from the frontmatter
if page.Markup != "" {- format := guessType(page.Markup)
+ format := helpers.GuessType(page.Markup)
if format != "unknown" {return format
}
}
- // Then try to guess from the extension
- ext := strings.ToLower(path.Ext(page.FileName))
- if strings.HasPrefix(ext, ".") {
- return guessType(ext[1:])
- }
-
- return "unknown"
+ return helpers.GuessType(page.Source.Ext())
}
-func guessType(in string) string {
- switch strings.ToLower(in) {
- case "md", "markdown", "mdown":
- return "markdown"
- case "rst":
- return "rst"
- case "html", "htm":
- return "html"
- }
- return "unknown"
-}
-
func (page *Page) detectFrontMatter() (f *parser.FrontmatterType) {
return parser.DetectFrontMatter(rune(page.frontmatter[0]))
}
@@ -585,7 +547,7 @@
meta, err := psr.Metadata()
if meta != nil {
if err != nil {
- jww.ERROR.Printf("Error parsing page meta data for %s", page.FileName)
+ jww.ERROR.Printf("Error parsing page meta data for %s", page.File.Path())
jww.ERROR.Println(err)
return err
}
@@ -601,7 +563,7 @@
}
func (page *Page) SetSourceContent(content []byte) {
- page.sourceContent = content
+ page.Source.Content = content
}
func (page *Page) SetSourceMetaData(in interface{}, mark rune) (err error) {
@@ -611,7 +573,7 @@
}
by = append(by, '\n')
- page.sourceFrontmatter = by
+ page.Source.Frontmatter = by
return nil
}
@@ -626,8 +588,8 @@
func (page *Page) saveSourceAs(path string, safe bool) error {
b := new(bytes.Buffer)
- b.Write(page.sourceFrontmatter)
- b.Write(page.sourceContent)
+ b.Write(page.Source.Frontmatter)
+ b.Write(page.Source.Content)
err := page.saveSource(b.Bytes(), path, safe)
if err != nil {
@@ -666,100 +628,19 @@
markupType := page.guessMarkupType()
switch markupType {
case "markdown", "rst":
- tmpContent, tmpTableOfContents := extractTOC(page.renderContent(RemoveSummaryDivider(page.rawContent)))
- page.Content = bytesToHTML(tmpContent)
- page.TableOfContents = bytesToHTML(tmpTableOfContents)
+ tmpContent, tmpTableOfContents := helpers.ExtractTOC(page.renderContent(helpers.RemoveSummaryDivider(page.rawContent)))
+ page.Content = helpers.BytesToHTML(tmpContent)
+ page.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
case "html":
- page.Content = bytesToHTML(page.rawContent)
+ page.Content = helpers.BytesToHTML(page.rawContent)
default:
- return fmt.Errorf("Error converting unsupported file type '%s' for page '%s'", markupType, page.FileName)+ return fmt.Errorf("Error converting unsupported file type '%s' for page '%s'", markupType, page.Source.Path())}
return nil
}
-func getHtmlRenderer(defaultFlags int, footnoteref string) blackfriday.Renderer {
- renderParameters := blackfriday.HtmlRendererParameters{
- FootnoteAnchorPrefix: viper.GetString("FootnoteAnchorPrefix"),
- FootnoteReturnLinkContents: viper.GetString("FootnoteReturnLinkContents"),
- }
-
- if len(footnoteref) != 0 {
- renderParameters.FootnoteAnchorPrefix = footnoteref + ":" +
- renderParameters.FootnoteAnchorPrefix
- }
-
- htmlFlags := defaultFlags
- htmlFlags |= blackfriday.HTML_USE_XHTML
- htmlFlags |= blackfriday.HTML_USE_SMARTYPANTS
- htmlFlags |= blackfriday.HTML_SMARTYPANTS_FRACTIONS
- htmlFlags |= blackfriday.HTML_SMARTYPANTS_LATEX_DASHES
- htmlFlags |= blackfriday.HTML_FOOTNOTE_RETURN_LINKS
-
- return blackfriday.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters)
-}
-
-func getMarkdownExtensions() int {
- return 0 | blackfriday.EXTENSION_NO_INTRA_EMPHASIS |
- blackfriday.EXTENSION_TABLES | blackfriday.EXTENSION_FENCED_CODE |
- blackfriday.EXTENSION_AUTOLINK | blackfriday.EXTENSION_STRIKETHROUGH |
- blackfriday.EXTENSION_SPACE_HEADERS | blackfriday.EXTENSION_FOOTNOTES |
- blackfriday.EXTENSION_HEADER_IDS
-}
-
-func markdownRender(content []byte, footnoteref string) []byte {
- return blackfriday.Markdown(content, getHtmlRenderer(0, footnoteref),
- getMarkdownExtensions())
-}
-
-func markdownRenderWithTOC(content []byte, footnoteref string) []byte {
- return blackfriday.Markdown(content,
- getHtmlRenderer(blackfriday.HTML_TOC, footnoteref),
- getMarkdownExtensions())
-}
-
-func extractTOC(content []byte) (newcontent []byte, toc []byte) {
- origContent := make([]byte, len(content))
- copy(origContent, content)
- first := []byte(`<nav>
-<ul>`)
-
- last := []byte(`</ul>
-</nav>`)
-
- replacement := []byte(`<nav id="TableOfContents">
-<ul>`)
-
- startOfTOC := bytes.Index(content, first)
-
- peekEnd := len(content)
- if peekEnd > 70+startOfTOC {
- peekEnd = 70 + startOfTOC
- }
-
- if startOfTOC < 0 {
- return stripEmptyNav(content), toc
- }
- // Need to peek ahead to see if this nav element is actually the right one.
- correctNav := bytes.Index(content[startOfTOC:peekEnd], []byte(`#toc_0`))
- if correctNav < 0 { // no match found
- return content, toc
- }
- lengthOfTOC := bytes.Index(content[startOfTOC:], last) + len(last)
- endOfTOC := startOfTOC + lengthOfTOC
-
- newcontent = append(content[:startOfTOC], content[endOfTOC:]...)
- toc = append(replacement, origContent[startOfTOC+len(first):endOfTOC]...)
- return
-}
-
-func ReaderToBytes(lines io.Reader) []byte {
- b := new(bytes.Buffer)
- b.ReadFrom(lines)
- return b.Bytes()
-}
-
func (p *Page) FullFilePath() string {
- return path.Join(p.Dir, p.FileName)
+ return path.Join(p.Source.Dir(), p.Source.Path())
}
func (p *Page) TargetPath() (outfile string) {
@@ -775,7 +656,7 @@
}
// If there's a Permalink specification, we use that
- if override, ok := p.Site.Permalinks[p.Section]; ok {
+ if override, ok := p.Site.Permalinks[p.Section()]; ok {
var err error
outfile, err = override.Expand(p)
if err == nil {
@@ -787,32 +668,11 @@
}
if len(strings.TrimSpace(p.Slug)) > 0 {
- outfile = strings.TrimSpace(p.Slug) + "." + p.Extension
+ outfile = strings.TrimSpace(p.Slug) + "." + p.Extension()
} else {
// Fall back to filename
- _, t := path.Split(p.FileName)
- outfile = helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension)
+ outfile = helpers.ReplaceExtension(p.Source.LogicalName(), p.Extension())
}
- return path.Join(p.Dir, strings.TrimSpace(outfile))
-}
-
-// sliceToLower goes through the source slice and lowers all values.
-func sliceToLower(s []string) []string {
- if s == nil {
- return nil
- }
-
- l := make([]string, len(s))
- for i, v := range s {
- l[i] = strings.ToLower(v)
- }
-
- return l
-}
-
-func md5ForFilename(f string) string {
- h := md5.New()
- h.Write([]byte(f))
- return hex.EncodeToString(h.Sum([]byte{}))
+ return path.Join(p.Source.Dir(), strings.TrimSpace(outfile))
}
--- a/hugolib/pageGroup.go
+++ b/hugolib/pageGroup.go
@@ -83,20 +83,25 @@
direction = "desc"
}
- ppt := reflect.TypeOf(&Page{})
+ ppt := reflect.TypeOf(&Page{}) // *hugolib.Page
+
ft, ok := ppt.Elem().FieldByName(key)
+
if !ok {
return nil, errors.New("No such field in Page struct")
}
+
tmp := reflect.MakeMap(reflect.MapOf(ft.Type, reflect.SliceOf(ppt)))
for _, e := range p {
ppv := reflect.ValueOf(e)
fv := ppv.Elem().FieldByName(key)
- if !tmp.MapIndex(fv).IsValid() {
- tmp.SetMapIndex(fv, reflect.MakeSlice(reflect.SliceOf(ppt), 0, 0))
+ if !fv.IsNil() {
+ if !tmp.MapIndex(fv).IsValid() {
+ tmp.SetMapIndex(fv, reflect.MakeSlice(reflect.SliceOf(ppt), 0, 0))
+ }
+ tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv))
}
- tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv))
}
var r []PageGroup
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -4,6 +4,7 @@
"html/template"
"testing"
+ "github.com/spf13/hugo/source"
"github.com/spf13/viper"
)
@@ -33,6 +34,8 @@
{"x/y/z/boofar.md", "x/y/z", "", "", "/z/y/q/", false, "/z/y/q/", "/z/y/q/"},}
+ viper.Set("DefaultExtension", "html")+
for i, test := range tests {
viper.Set("uglyurls", test.uglyurls)
p := &Page{
@@ -45,7 +48,7 @@
BaseUrl: test.base,
},
},
- File: File{FileName: test.file, Dir: test.dir, Extension: "html"},
+ Source: Source{File: *source.NewFile(test.file)},
}
if test.slug != "" {--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -6,6 +6,8 @@
"strings"
"testing"
"time"
+
+ "github.com/spf13/hugo/helpers"
)
var EMPTY_PAGE = ""
@@ -507,7 +509,7 @@
func TestSectionEvaluation(t *testing.T) {
page, _ := NewPage("blue/file1.md")
page.ReadFrom(strings.NewReader(SIMPLE_PAGE))
- if page.Section != "blue" {+ if page.Section() != "blue" { t.Errorf("Section should be %s, got: %s", "blue", page.Section)}
}
@@ -529,12 +531,12 @@
path string
expectedLayout []string
}{
- {SIMPLE_PAGE_NOLAYOUT, path_content_two_dir, L("dub/sub/single.html", "dub/single.html", "_default/single.html")},
+ {SIMPLE_PAGE_NOLAYOUT, path_content_two_dir, L("dub/single.html", "_default/single.html")},
{SIMPLE_PAGE_NOLAYOUT, path_content_one_dir, L("gub/single.html", "_default/single.html")},
{SIMPLE_PAGE_NOLAYOUT, path_content_no_dir, L("page/single.html", "_default/single.html")},
{SIMPLE_PAGE_NOLAYOUT, path_one_directory, L("fub/single.html", "_default/single.html")},
{SIMPLE_PAGE_NOLAYOUT, path_no_directory, L("page/single.html", "_default/single.html")},
- {SIMPLE_PAGE_LAYOUT_FOOBAR, path_content_two_dir, L("dub/sub/foobar.html", "dub/foobar.html", "_default/foobar.html")},
+ {SIMPLE_PAGE_LAYOUT_FOOBAR, path_content_two_dir, L("dub/foobar.html", "_default/foobar.html")},
{SIMPLE_PAGE_LAYOUT_FOOBAR, path_content_one_dir, L("gub/foobar.html", "_default/foobar.html")},
{SIMPLE_PAGE_LAYOUT_FOOBAR, path_one_directory, L("fub/foobar.html", "_default/foobar.html")},
{SIMPLE_PAGE_LAYOUT_FOOBAR, path_no_directory, L("page/foobar.html", "_default/foobar.html")},
@@ -576,7 +578,7 @@
}
for _, test := range tests {
- res := sliceToLower(test.value)
+ res := helpers.SliceToLower(test.value)
for i, val := range res {
if val != test.expected[i] {
t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
--- a/hugolib/path_seperators_test.go
+++ b/hugolib/path_seperators_test.go
@@ -17,7 +17,7 @@
if err != nil { t.Fatalf("Error in NewPageFrom")}
- if p.Section != "" {+ if p.Section() != "" { t.Fatalf("No section should be set for a file path: foobar")}
}
@@ -31,17 +31,16 @@
{path.Join("sub", "foobar.html"), "sub", L("sub/single.html", "_default/single.html")}, {path.Join("content", "foobar.html"), "", L("page/single.html", "_default/single.html")}, {path.Join("content", "sub", "foobar.html"), "sub", L("sub/single.html", "_default/single.html")},- {path.Join("content", "dub", "sub", "foobar.html"), "dub/sub", L("dub/sub/single.html", "dub/single.html", "_default/single.html")},+ {path.Join("content", "dub", "sub", "foobar.html"), "dub", L("dub/single.html", "_default/single.html")},}
for _, el := range toCheck {
p, err := NewPageFrom(strings.NewReader(SIMPLE_PAGE_YAML), el.input)
- p.guessSection()
if err != nil { t.Errorf("Reading from SIMPLE_PAGE_YAML resulted in an error: %s", err)}
- if p.Section != el.section {
- t.Errorf("Section not set to %s for page %s. Got: %s", el.section, el.input, p.Section)
+ if p.Section() != el.section {
+ t.Errorf("Section not set to %s for page %s. Got: %s", el.section, el.input, p.Section())
}
for _, y := range el.layout {
--- a/hugolib/permalinks.go
+++ b/hugolib/permalinks.go
@@ -3,7 +3,6 @@
import (
"errors"
"fmt"
- "path/filepath"
"strconv"
"strings"
@@ -120,9 +119,9 @@
// pageToPermalinkFilename returns the URL-safe form of the filename
func pageToPermalinkFilename(p *Page, _ string) (string, error) {
- var extension = filepath.Ext(p.FileName)
- var name = p.FileName[0 : len(p.FileName)-len(extension)]
- return helpers.Urlize(name), nil
+ //var extension = p.Source.Ext
+ //var name = p.Source.Path()[0 : len(p.Source.Path())-len(extension)]
+ return helpers.Urlize(p.Source.BaseFileName()), nil
}
// if the page has a slug, return the slug, else return the title
@@ -143,7 +142,7 @@
func pageToPermalinkSection(p *Page, _ string) (string, error) {
// Page contains Node contains UrlPath which has Section
- return p.Section, nil
+ return p.Section(), nil
}
func init() {
--- a/hugolib/planner.go
+++ b/hugolib/planner.go
@@ -11,7 +11,7 @@
}
for _, p := range s.Pages {
- fmt.Fprintf(out, "%s", p.FileName)
+ fmt.Fprintf(out, "%s", p.Source.Path())
if p.IsRenderable() {
fmt.Fprintf(out, " (renderer: markdown)")
} else {
--- a/hugolib/shortcode.go
+++ b/hugolib/shortcode.go
@@ -20,6 +20,7 @@
"strings"
"unicode"
+ "github.com/spf13/hugo/helpers"
jww "github.com/spf13/jwalterweatherman"
)
@@ -93,7 +94,7 @@
var data = &ShortcodeWithPage{Params: params, Page: p}
if endStart > 0 {
s := stringToParse[leadEnd+3 : leadEnd+endStart]
- data.Inner = template.HTML(renderBytes([]byte(CleanP(ShortcodesHandle(s, p, t))), p.guessMarkupType(), p.UniqueId()))
+ data.Inner = template.HTML(helpers.RenderBytes([]byte(CleanP(ShortcodesHandle(s, p, t))), p.guessMarkupType(), p.UniqueId()))
remainder := CleanP(stringToParse[leadEnd+endEnd:])
return CleanP(stringToParse[:leadStart]) +
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -236,7 +236,7 @@
func (s *Site) checkDescriptions() {
for _, p := range s.Pages {
if len(p.Description) < 60 {
- jww.FEEDBACK.Println(p.FileName + " ")
+ jww.FEEDBACK.Println(p.Source.Path() + " ")
}
}
}
@@ -340,7 +340,7 @@
for i := 0; i < procs*4; i++ {
wg.Add(1)
- go pageReader(s, filechan, results, wg)
+ go sourceReader(s, filechan, results, wg)
}
errs := make(chan error)
@@ -397,10 +397,10 @@
return fmt.Errorf("%s\n%s", readErrs, renderErrs)}
-func pageReader(s *Site, files <-chan *source.File, results chan<- pageResult, wg *sync.WaitGroup) {
+func sourceReader(s *Site, files <-chan *source.File, results chan<- pageResult, wg *sync.WaitGroup) {
defer wg.Done()
for file := range files {
- page, err := NewPage(file.LogicalName)
+ page, err := NewPage(file.Path())
if err != nil {
results <- pageResult{nil, err}
continue
@@ -407,8 +407,6 @@
}
page.Site = &s.Info
page.Tmpl = s.Tmpl
- page.Section = file.Section
- page.Dir = file.Dir
if err := page.ReadFrom(file.Contents); err != nil {
results <- pageResult{nil, err}
continue
@@ -604,7 +602,7 @@
x := WeightedPage{weight.(int), p}
s.Taxonomies[plural].Add(v, x)
} else {- jww.ERROR.Printf("Invalid %s in %s\n", plural, p.File.FileName)+ jww.ERROR.Printf("Invalid %s in %s\n", plural, p.File.Path())}
}
}
@@ -620,7 +618,7 @@
func (s *Site) assembleSections() {
for i, p := range s.Pages {
- s.Sections.Add(p.Section, WeightedPage{s.Pages[i].Weight, s.Pages[i]})
+ s.Sections.Add(p.Section(), WeightedPage{s.Pages[i].Weight, s.Pages[i]})
}
for k := range s.Sections {
--- a/hugolib/site_show_plan_test.go
+++ b/hugolib/site_show_plan_test.go
@@ -47,13 +47,13 @@
for _, x := range gotList {
if !stringInSlice(x, expectedList) {
- t.Errorf("ShowPlan expected:\n%q\ngot\n%q", expected, got)
+ t.Errorf("\nShowPlan expected:\n%q\ngot:\n%q", expected, got)
}
}
for _, x := range expectedList {
if !stringInSlice(x, gotList) {
- t.Errorf("ShowPlan expected:\n%q\ngot\n%q", expected, got)
+ t.Errorf("\nShowPlan expected:\n%q\ngot:\n%q", expected, got)
}
}
}
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -213,7 +213,7 @@
t.Errorf("%s => OutFile expected: '%s', got: '%s'", test.doc, expected, p.TargetPath())}
- if p.Section != test.expectedSection {
+ if p.Section() != test.expectedSection {
t.Errorf("%s => p.Section expected: %s, got: %s", test.doc, test.expectedSection, p.Section)
}
}
@@ -223,10 +223,10 @@
files := make(map[string][]byte)
target := &target.InMemoryTarget{Files: files}
sources := []source.ByteSource{
- {"sect/doc1.md", []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*"), "sect"},
- {"sect/doc2.md", []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*"), "sect"},
- {"sect/doc3.md", []byte("---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*"), "sect"},
- {"sect/doc4.md", []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*"), "sect"},
+ {"sect/doc1.md", []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*")},
+ {"sect/doc2.md", []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*")},
+ {"sect/doc3.md", []byte("---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*")},
+ {"sect/doc4.md", []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*")},
}
siteSetup := func() *Site {
@@ -283,14 +283,14 @@
files := make(map[string][]byte)
target := &target.InMemoryTarget{Files: files}
sources := []source.ByteSource{
- {"sect/doc1.html", []byte("---\nmarkup: markdown\n---\n# title\nsome *content*"), "sect"},
- {"sect/doc2.html", []byte("<!doctype html><html><body>more content</body></html>"), "sect"},
- {"sect/doc3.md", []byte("# doc3\n*some* content"), "sect"},
- {"sect/doc4.md", []byte("---\ntitle: doc4\n---\n# doc4\n*some content*"), "sect"},
- {"sect/doc5.html", []byte("<!doctype html><html>{{ template \"head\" }}<body>body5</body></html>"), "sect"},
- {"sect/doc6.html", []byte("<!doctype html><html>{{ template \"head_abs\" }}<body>body5</body></html>"), "sect"},
- {"doc7.html", []byte("<html><body>doc7 content</body></html>"), ""},
- {"sect/doc8.html", []byte("---\nmarkup: md\n---\n# title\nsome *content*"), "sect"},
+ {"sect/doc1.html", []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
+ {"sect/doc2.html", []byte("<!doctype html><html><body>more content</body></html>")},
+ {"sect/doc3.md", []byte("# doc3\n*some* content")},
+ {"sect/doc4.md", []byte("---\ntitle: doc4\n---\n# doc4\n*some content*")},
+ {"sect/doc5.html", []byte("<!doctype html><html>{{ template \"head\" }}<body>body5</body></html>")},
+ {"sect/doc6.html", []byte("<!doctype html><html>{{ template \"head_abs\" }}<body>body5</body></html>")},
+ {"doc7.html", []byte("<html><body>doc7 content</body></html>")},
+ {"sect/doc8.html", []byte("---\nmarkup: md\n---\n# title\nsome *content*")},
}
viper.Set("verbose", true)@@ -350,8 +350,8 @@
files := make(map[string][]byte)
target := &target.InMemoryTarget{Files: files}
sources := []source.ByteSource{
- {"sect/doc1.html", []byte("<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>"), "sect"},
- {"content/blue/doc2.html", []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>"), "blue"},
+ {"sect/doc1.html", []byte("<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>")},
+ {"content/blue/doc2.html", []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")},
}
for _, canonify := range []bool{true, false} {
viper.Set("CanonifyUrls", canonify)
@@ -428,10 +428,10 @@
Front Matter with Ordered Pages 4. This is longer content`)
var WEIGHTED_SOURCES = []source.ByteSource{
- {"sect/doc1.md", WEIGHTED_PAGE_1, "sect"},
- {"sect/doc2.md", WEIGHTED_PAGE_2, "sect"},
- {"sect/doc3.md", WEIGHTED_PAGE_3, "sect"},
- {"sect/doc4.md", WEIGHTED_PAGE_4, "sect"},
+ {"sect/doc1.md", WEIGHTED_PAGE_1},
+ {"sect/doc2.md", WEIGHTED_PAGE_2},
+ {"sect/doc3.md", WEIGHTED_PAGE_3},
+ {"sect/doc4.md", WEIGHTED_PAGE_4},
}
func TestOrderedPages(t *testing.T) {
@@ -484,13 +484,18 @@
}
var GROUPED_SOURCES = []source.ByteSource{
- {"sect1/doc1.md", WEIGHTED_PAGE_1, "sect1"},
- {"sect1/doc2.md", WEIGHTED_PAGE_2, "sect1"},
- {"sect2/doc3.md", WEIGHTED_PAGE_3, "sect2"},
- {"sect3/doc4.md", WEIGHTED_PAGE_4, "sect3"},
+ {"sect1/doc1.md", WEIGHTED_PAGE_1},
+ {"sect1/doc2.md", WEIGHTED_PAGE_2},
+ {"sect2/doc3.md", WEIGHTED_PAGE_3},
+ {"sect3/doc4.md", WEIGHTED_PAGE_4},
}
func TestGroupedPages(t *testing.T) {
+ defer func() {
+ if r := recover(); r != nil {
+ fmt.Println("Recovered in f", r)
+ }
+ }()
files := make(map[string][]byte)
target := &target.InMemoryTarget{Files: files}
@@ -583,9 +588,9 @@
files := make(map[string][]byte)
target := &target.InMemoryTarget{Files: files}
sources := []source.ByteSource{
- {"sect/doc1.md", PAGE_WITH_WEIGHTED_TAXONOMIES_1, "sect"},
- {"sect/doc2.md", PAGE_WITH_WEIGHTED_TAXONOMIES_2, "sect"},
- {"sect/doc3.md", PAGE_WITH_WEIGHTED_TAXONOMIES_3, "sect"},
+ {"sect/doc1.md", PAGE_WITH_WEIGHTED_TAXONOMIES_1},
+ {"sect/doc2.md", PAGE_WITH_WEIGHTED_TAXONOMIES_2},
+ {"sect/doc3.md", PAGE_WITH_WEIGHTED_TAXONOMIES_3},
}
taxonomies := make(map[string]string)
--- a/hugolib/site_url_test.go
+++ b/hugolib/site_url_test.go
@@ -45,8 +45,8 @@
}
var urlFakeSource = []source.ByteSource{
- {"content/blue/doc1.md", []byte(SLUG_DOC_1), "blue"},
- {"content/blue/doc2.md", []byte(SLUG_DOC_2), "blue"},
+ {"content/blue/doc1.md", []byte(SLUG_DOC_1)},
+ {"content/blue/doc2.md", []byte(SLUG_DOC_2)},
}
func TestPageCount(t *testing.T) {
--- a/hugolib/summary.go
+++ b/hugolib/summary.go
@@ -1,76 +1,1 @@
package hugolib
-
-import (
- "bytes"
- "os/exec"
- "strings"
-
- jww "github.com/spf13/jwalterweatherman"
-)
-
-var summaryLength = 70
-var summaryDivider = []byte("<!--more-->")
-
-func TotalWords(s string) int {
- return len(strings.Fields(s))
-}
-
-func WordCount(s string) map[string]int {
- m := make(map[string]int)
- for _, f := range strings.Fields(s) {
- m[f] += 1
- }
-
- return m
-}
-
-func RemoveSummaryDivider(content []byte) []byte {
- return bytes.Replace(content, summaryDivider, []byte(""), -1)
-}
-
-func TruncateWords(s string, max int) string {
- words := strings.Fields(s)
- if max > len(words) {
- return strings.Join(words, " ")
- }
-
- return strings.Join(words[:max], " ")
-}
-
-func TruncateWordsToWholeSentence(s string, max int) string {
- words := strings.Fields(s)
- if max > len(words) {
- return strings.Join(words, " ")
- }
-
- for counter, word := range words[max:] {
- if strings.HasSuffix(word, ".") ||
- strings.HasSuffix(word, "?") ||
- strings.HasSuffix(word, ".\"") ||
- strings.HasSuffix(word, "!") {- return strings.Join(words[:max+counter+1], " ")
- }
- }
-
- return strings.Join(words[:max], " ")
-}
-
-func getRstContent(content []byte) string {
- cleanContent := bytes.Replace(content, summaryDivider, []byte(""), 1)
-
- cmd := exec.Command("rst2html.py", "--leave-comments")
- cmd.Stdin = bytes.NewReader(cleanContent)
- var out bytes.Buffer
- cmd.Stdout = &out
- if err := cmd.Run(); err != nil {
- jww.ERROR.Println(err)
- }
-
- rstLines := strings.Split(out.String(), "\n")
- for i, line := range rstLines {
- if strings.HasPrefix(line, "<body>") {
- rstLines = (rstLines[i+1 : len(rstLines)-3])
- }
- }
- return strings.Join(rstLines, "\n")
-}
--- /dev/null
+++ b/source/file.go
@@ -0,0 +1,114 @@
+// Copyright © 2014 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "io"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/spf13/hugo/helpers"
+)
+
+type File struct {
+ relpath string // Original Full Path eg. /Users/Home/Hugo/foo.txt
+ logicalName string // foo.txt
+ Contents io.Reader
+ section string // The first directory
+ dir string // The full directory Path (minus file name)
+ ext string // Just the ext (eg txt)
+ uniqueId string // MD5 of the filename
+}
+
+func (f *File) UniqueId() string {
+ if f.uniqueId == "" {
+ f.uniqueId = helpers.Md5String(f.LogicalName())
+ }
+ return f.uniqueId
+}
+
+// Filename without extension
+func (f *File) BaseFileName() string {
+ return helpers.Filename(f.LogicalName())
+}
+
+func (f *File) Section() string {
+ if f.section != "" {
+ return f.section
+ } else {
+ f.section = helpers.GuessSection(f.Dir())
+ return f.section
+ }
+}
+
+func (f *File) LogicalName() string {
+ if f.logicalName != "" {
+ return f.logicalName
+ } else {
+ _, f.logicalName = path.Split(f.relpath)
+ return f.logicalName
+ }
+}
+
+//func (f *File) SetDir(dir string) {
+//f.dir = dir
+//}
+
+func (f *File) Dir() string {
+ if f.dir != "" {
+ return f.dir
+ } else {
+ f.dir, _ = path.Split(f.relpath)
+ return f.dir
+ }
+}
+
+func (f *File) Extension() string {
+ if f.ext != "" {
+ return f.ext
+ } else {
+ f.ext = strings.TrimPrefix(filepath.Ext(f.LogicalName()), ".")
+ return f.ext
+ }
+}
+
+func (f *File) Ext() string {
+ return f.Extension()
+}
+
+func (f *File) Path() string {
+ return f.relpath
+}
+
+func NewFileWithContents(relpath string, content io.Reader) *File {
+ file := NewFile(relpath)
+ file.Contents = content
+ return file
+}
+
+func NewFile(relpath string) *File {
+ return &File{
+ relpath: relpath,
+ }
+}
+
+func NewFileFromAbs(base, fullpath string, content io.Reader) (f *File, err error) {
+ var name string
+ if name, err = helpers.GetRelativePath(fullpath, base); err != nil {
+ return nil, err
+ }
+
+ return NewFileWithContents(name, content), nil
+}
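Each accessor above memoizes its result on first use, so a File is cheap to construct and only computes what is asked of it. An illustrative use of the absolute-path constructor (paths assumed, not taken from the patch):

    // NewFileFromAbs stores the path relative to base; accessors derive the rest.
    f, err := source.NewFileFromAbs("/home/me/site/content", "/home/me/site/content/post/first.md", strings.NewReader("*hi*"))
    if err == nil {
        fmt.Println(f.Path())    // "post/first.md"
        fmt.Println(f.Section()) // "post"
    }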
--- a/source/filesystem.go
+++ b/source/filesystem.go
@@ -1,14 +1,27 @@
+// Copyright © 2014 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
package source
import (
"bytes"
- "errors"
"io"
"io/ioutil"
"os"
- "path"
"path/filepath"
"strings"
+
+ "github.com/spf13/hugo/helpers"
)
type Input interface {
@@ -15,14 +28,6 @@
Files() []*File
}
-type File struct {
- name string
- LogicalName string
- Contents io.Reader
- Section string
- Dir string
-}
-
type Filesystem struct {
files []*File
Base string
@@ -29,6 +34,23 @@
AvoidPaths []string
}
+func (f *Filesystem) FilesByExts(exts ...string) []*File {
+ var newFiles []*File
+
+ if len(exts) == 0 {
+ return f.Files()
+ }
+
+ for _, x := range f.Files() {
+ for _, e := range exts {
+ if x.Ext() == strings.TrimPrefix(e, ".") {
+ newFiles = append(newFiles, x)
+ }
+ }
+ }
+ return newFiles
+}
+
func (f *Filesystem) Files() []*File {
if len(f.files) < 1 {
f.captureFiles()
@@ -36,47 +58,23 @@
return f.files
}
-var errMissingBaseDir = errors.New("source: missing base directory")
-
func (f *Filesystem) add(name string, reader io.Reader) (err error) {
+ var file *File
- if name, err = f.getRelativePath(name); err != nil {
- return err
- }
+ //if f.Base == "" {+ //file = NewFileWithContents(name, reader)
+ //} else {
+ file, err = NewFileFromAbs(f.Base, name, reader)
+ //}
- // section should be the first part of the path
- dir, logical := path.Split(name)
- parts := strings.Split(dir, "/")
- section := parts[0]
-
- if section == "." {- section = ""
+ if err == nil {
+ f.files = append(f.files, file)
}
-
- f.files = append(f.files, &File{
- name: name,
- LogicalName: logical,
- Contents: reader,
- Section: section,
- Dir: dir,
- })
-
- return
+ return err
}
func (f *Filesystem) getRelativePath(name string) (final string, err error) {
- if filepath.IsAbs(name) && f.Base == "" {
- return "", errMissingBaseDir
- }
- name = filepath.Clean(name)
- base := filepath.Clean(f.Base)
-
- name, err = filepath.Rel(base, name)
- if err != nil {- return "", err
- }
- name = filepath.ToSlash(name)
- return name, nil
+ return helpers.GetRelativePath(name, f.Base)
}
func (f *Filesystem) captureFiles() {
--- a/source/filesystem_test.go
+++ b/source/filesystem_test.go
@@ -32,6 +32,7 @@
}
for _, src := range []*Filesystem{srcDefault, srcWithBase} {
+
p := test.filename
if !filepath.IsAbs(test.filename) {
p = path.Join(src.Base, test.filename)
@@ -38,10 +39,10 @@
}
if err := src.add(p, bytes.NewReader([]byte(test.content))); err != nil {
- if err == errMissingBaseDir {
+ if err.Error() == "source: missing base directory" {
continue
}
- t.Fatalf("%s add returned and error: %s", p, err)+ t.Fatalf("%s add returned an error: %s", p, err)}
if len(src.Files()) != 1 {
@@ -49,8 +50,8 @@
}
f := src.Files()[0]
- if f.LogicalName != test.logical {
- t.Errorf("Filename (Base: %q) expected: %q, got: %q", src.Base, test.logical, f.LogicalName)
+ if f.LogicalName() != test.logical {
+ t.Errorf("Filename (Base: %q) expected: %q, got: %q", src.Base, test.logical, f.LogicalName())
}
b := new(bytes.Buffer)
@@ -59,12 +60,12 @@
t.Errorf("File (Base: %q) contents should be %q, got: %q", src.Base, test.content, b.String())}
- if f.Section != test.section {
- t.Errorf("File section (Base: %q) expected: %q, got: %q", src.Base, test.section, f.Section)
+ if f.Section() != test.section {
+ t.Errorf("File section (Base: %q) expected: %q, got: %q", src.Base, test.section, f.Section())
}
- if f.Dir != test.dir {
- t.Errorf("Dir path (Base: %q) expected: %q, got: %q", src.Base, test.dir, f.Dir)
+ if f.Dir() != test.dir {
+ t.Errorf("Dir path (Base: %q) expected: %q, got: %q", src.Base, test.dir, f.Dir())
}
}
}
--- a/source/inmemory.go
+++ b/source/inmemory.go
@@ -3,17 +3,15 @@
import (
"bytes"
"fmt"
- "path"
)
type ByteSource struct {
Name string
Content []byte
- Section string
}
func (b *ByteSource) String() string {
- return fmt.Sprintf("%s %s %s", b.Name, b.Section, string(b.Content))
+ return fmt.Sprintf("%s %s", b.Name, string(b.Content))
}
type InMemorySource struct {
@@ -23,12 +21,7 @@
func (i *InMemorySource) Files() (files []*File) {
files = make([]*File, len(i.ByteSource))
for i, fake := range i.ByteSource {
- files[i] = &File{
- LogicalName: fake.Name,
- Contents: bytes.NewReader(fake.Content),
- Section: fake.Section,
- Dir: path.Dir(fake.Name),
- }
+ files[i] = NewFileWithContents(fake.Name, bytes.NewReader(fake.Content))
}
return
}
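With Section and Dir dropped from ByteSource, in-memory fixtures shrink to name-plus-content pairs and the section is derived from the path by source.File. Illustrative (not part of the patch):

    // The section now falls out of the file's path.
    in := &source.InMemorySource{ByteSource: []source.ByteSource{
        {Name: "sect/doc1.md", Content: []byte("*some content*")},
    }}
    fmt.Println(in.Files()[0].Section()) // "sect"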
--