ref: 14227351fad4acda28c9de90b593c952026f2911
parent: 64572d2d60a6bee0cc0b87dcf0a944d88375d964
author: spf13 <steve.francia@gmail.com>
date: Thu Feb 27 13:32:09 EST 2014
Reorganization of helpers. Centralized Url/Path logic. Fixed #175.
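
The URL/path helpers previously spread across hugolib and helpers/templates.go now live together in the helpers package (helpers/general.go, helpers/path.go, helpers/url.go). A rough usage sketch of the centralized helpers follows; it is illustrative only and not part of this patch (the example program and its output comments are assumptions based on the new tests, with UglyUrls left off):

    package main

    import (
        "fmt"

        "github.com/spf13/hugo/helpers"
    )

    func main() {
        // MakePath/Urlize sanitize arbitrary strings for use in paths and URLs.
        fmt.Println(helpers.MakePath("Social Media"))            // social-media
        fmt.Println(helpers.Urlize("Vim (text editor)"))         // vim-text-editor

        // PrettifyPath/Uglify convert between the two URL styles.
        fmt.Println(helpers.PrettifyPath("/section/name.html"))  // /section/name/index.html
        fmt.Println(helpers.Uglify("/section/name/"))            // /section/name.html

        // MakePermalink resolves a path against a base URL.
        fmt.Println(helpers.MakePermalink("http://spf13.com/", "post/how-i-blog"))
        // http://spf13.com/post/how-i-blog
    }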
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,7 +1,11 @@
language: go
go:
+ - 1.1
- tip
script:
- go test ./...
- go build
- ./hugo -s docs/
+install:
+ - go get github.com/stretchr/testify
+ - go get -v ./...
--- /dev/null
+++ b/helpers/general.go
@@ -1,0 +1,51 @@
+// Copyright © 2014 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "bytes"
+ "strings"
+)
+
+func StripHTML(s string) string {
+    output := ""
+
+    // Shortcut strings with no tags in them
+    if !strings.ContainsAny(s, "<>") {
+        output = s
+    } else {
+        s = strings.Replace(s, "\n", " ", -1)
+        s = strings.Replace(s, "</p>", " \n", -1)
+        s = strings.Replace(s, "<br>", " \n", -1)
+        s = strings.Replace(s, "</br>", " \n", -1)
+
+        // Walk through the string removing all tags
+        b := new(bytes.Buffer)
+        inTag := false
+        for _, r := range s {
+            switch r {
+            case '<':
+                inTag = true
+            case '>':
+                inTag = false
+            default:
+                if !inTag {
+                    b.WriteRune(r)
+                }
+            }
+        }
+        output = b.String()
+    }
+    return output
+}
--- /dev/null
+++ b/helpers/helpers_test.go
@@ -1,0 +1,76 @@
+package helpers
+
+import (
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestPretty(t *testing.T) {
+    assert.Equal(t, PrettifyPath("/section/name.html"), "/section/name/index.html")
+    assert.Equal(t, PrettifyPath("/section/sub/name.html"), "/section/sub/name/index.html")
+    assert.Equal(t, PrettifyPath("/section/name/"), "/section/name/index.html")
+    assert.Equal(t, PrettifyPath("/section/name/index.html"), "/section/name/index.html")
+    assert.Equal(t, PrettifyPath("/index.html"), "/index.html")
+    assert.Equal(t, PrettifyPath("/name.xml"), "/name/index.xml")
+    assert.Equal(t, PrettifyPath("/"), "/")
+    assert.Equal(t, PrettifyPath(""), "/")
+    assert.Equal(t, PrettifyUrl("/section/name.html"), "/section/name")
+    assert.Equal(t, PrettifyUrl("/section/sub/name.html"), "/section/sub/name")
+    assert.Equal(t, PrettifyUrl("/section/name/"), "/section/name")
+    assert.Equal(t, PrettifyUrl("/section/name/index.html"), "/section/name")
+    assert.Equal(t, PrettifyUrl("/index.html"), "/")
+    assert.Equal(t, PrettifyUrl("/name.xml"), "/name/index.xml")
+    assert.Equal(t, PrettifyUrl("/"), "/")
+    assert.Equal(t, PrettifyUrl(""), "/")
+}
+
+func TestUgly(t *testing.T) {
+    assert.Equal(t, Uglify("/section/name.html"), "/section/name.html")
+    assert.Equal(t, Uglify("/section/sub/name.html"), "/section/sub/name.html")
+    assert.Equal(t, Uglify("/section/name/"), "/section/name.html")
+    assert.Equal(t, Uglify("/section/name/index.html"), "/section/name.html")
+    assert.Equal(t, Uglify("/index.html"), "/index.html")
+    assert.Equal(t, Uglify("/name.xml"), "/name.xml")
+    assert.Equal(t, Uglify("/"), "/")
+    assert.Equal(t, Uglify(""), "/")
+}
+
+func TestMakePath(t *testing.T) {
+    tests := []struct {
+        input    string
+        expected string
+    }{
+        {" foo bar ", "foo-bar"},
+        {"foo.bar/foo_bar-foo", "foo.bar/foo_bar-foo"},
+        {"foo,bar:foo%bar", "foobarfoobar"},
+        {"foo/bar.html", "foo/bar.html"},
+        {"трям/трям", "трям/трям"},
+    }
+
+    for _, test := range tests {
+        output := MakePath(test.input)
+        if output != test.expected {
+            t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+        }
+    }
+}
+
+func TestUrlize(t *testing.T) {
+    tests := []struct {
+        input    string
+        expected string
+    }{
+        {" foo bar ", "foo-bar"},
+        {"foo.bar/foo_bar-foo", "foo.bar/foo_bar-foo"},
+        {"foo,bar:foo%bar", "foobarfoobar"},
+        {"foo/bar.html", "foo/bar.html"},
+        {"трям/трям", "%D1%82%D1%80%D1%8F%D0%BC/%D1%82%D1%80%D1%8F%D0%BC"},
+    }
+
+    for _, test := range tests {
+        output := Urlize(test.input)
+        if output != test.expected {
+            t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+        }
+    }
+}
--- /dev/null
+++ b/helpers/path.go
@@ -1,0 +1,120 @@
+// Copyright © 2014 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "os"
+ "path"
+ "regexp"
+ "strings"
+ "unicode"
+)
+
+var sanitizeRegexp = regexp.MustCompile("[^a-zA-Z0-9./_-]")
+
+// Take a string with any characters and replace it so the string could be used in a path.
+// E.g. Social Media -> social-media
+func MakePath(s string) string {
+    return UnicodeSanitize(strings.ToLower(strings.Replace(strings.TrimSpace(s), " ", "-", -1)))
+}
+
+func Sanitize(s string) string {
+    return sanitizeRegexp.ReplaceAllString(s, "")
+}
+
+func UnicodeSanitize(s string) string {
+    source := []rune(s)
+    target := make([]rune, 0, len(source))
+
+    for _, r := range source {
+        if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '.' || r == '/' || r == '_' || r == '-' {
+            target = append(target, r)
+        }
+    }
+
+    return string(target)
+}
+
+func ReplaceExtension(path string, newExt string) string {
+    f, _ := FileAndExt(path)
+ return f + "." + newExt
+}
+
+// Check if Exists && is Directory
+func DirExists(path string) (bool, error) {
+    fi, err := os.Stat(path)
+    if err == nil && fi.IsDir() {
+        return true, nil
+    }
+    if os.IsNotExist(err) {
+        return false, nil
+    }
+    return false, err
+}
+
+// Check if File / Directory Exists
+func Exists(path string) (bool, error) {
+    _, err := os.Stat(path)
+    if err == nil {
+        return true, nil
+    }
+    if os.IsNotExist(err) {
+        return false, nil
+    }
+    return false, err
+}
+
+func FileAndExt(in string) (name string, ext string) {
+    ext = path.Ext(in)
+    base := path.Base(in)
+
+    if strings.Contains(base, ".") {
+        name = base[:strings.LastIndex(base, ".")]
+    } else {
+        name = in
+    }
+
+    return
+}
+
+func PathPrep(ugly bool, in string) string {
+    if ugly {
+        return Uglify(in)
+    } else {
+        return PrettifyPath(in)
+    }
+}
+
+// /section/name.html -> /section/name/index.html
+// /section/name/ -> /section/name/index.html
+// /section/name/index.html -> /section/name/index.html
+func PrettifyPath(in string) string {
+    if path.Ext(in) == "" {
+        // /section/name/ -> /section/name/index.html
+        if len(in) < 2 {
+            return "/"
+        }
+        return path.Join(path.Clean(in), "index.html")
+    } else {
+        name, ext := FileAndExt(in)
+        if name == "index" {
+            // /section/name/index.html -> /section/name/index.html
+            return path.Clean(in)
+        } else {
+            // /section/name.html -> /section/name/index.html
+            return path.Join(path.Dir(in), name, "index"+ext)
+        }
+    }
+    return in
+}
--- a/helpers/templates.go
+++ /dev/null
@@ -1,56 +1,0 @@
-// Copyright © 2013 Steve Francia <spf@spf13.com>.
-//
-// Licensed under the Simple Public License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://opensource.org/licenses/Simple-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package helpers
-
-import (
- "net/url"
- "regexp"
- "strings"
- "unicode"
-)
-
-var sanitizeRegexp = regexp.MustCompile("[^a-zA-Z0-9./_-]")
-
-func MakePath(s string) string {
-    return unicodeSanitize(strings.ToLower(strings.Replace(strings.TrimSpace(s), " ", "-", -1)))
-}
-
-func Urlize(uri string) string {
-    sanitized := MakePath(uri)
-
- // escape unicode letters
- parsedUri, err := url.Parse(sanitized)
-    if err != nil {
-        // if net/url can not parse URL it's meaning Sanitize works incorrect
- panic(err)
- }
- return parsedUri.String()
-}
-
-func Sanitize(s string) string {
-    return sanitizeRegexp.ReplaceAllString(s, "")
-}
-
-func unicodeSanitize(s string) string {
-    source := []rune(s)
-    target := make([]rune, 0, len(source))
-
-    for _, r := range source {
-        if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '.' || r == '/' || r == '_' || r == '-' {
-            target = append(target, r)
-        }
-    }
-
-    return string(target)
-}
--- a/helpers/templates_test.go
+++ /dev/null
@@ -1,45 +1,0 @@
-package helpers
-
-import (
- "testing"
-)
-
-func TestMakePath(t *testing.T) {
-    tests := []struct {
-        input    string
-        expected string
-    }{
-        {" foo bar ", "foo-bar"},
-        {"foo.bar/foo_bar-foo", "foo.bar/foo_bar-foo"},
-        {"foo,bar:foo%bar", "foobarfoobar"},
-        {"foo/bar.html", "foo/bar.html"},
-        {"трям/трям", "трям/трям"},
-    }
-
-    for _, test := range tests {
-        output := MakePath(test.input)
-        if output != test.expected {
-            t.Errorf("Expected %#v, got %#v\n", test.expected, output)
-        }
-    }
-}
-
-func TestUrlize(t *testing.T) {
-    tests := []struct {
-        input    string
-        expected string
-    }{
-        {" foo bar ", "foo-bar"},
-        {"foo.bar/foo_bar-foo", "foo.bar/foo_bar-foo"},
-        {"foo,bar:foo%bar", "foobarfoobar"},
-        {"foo/bar.html", "foo/bar.html"},
-        {"трям/трям", "%D1%82%D1%80%D1%8F%D0%BC/%D1%82%D1%80%D1%8F%D0%BC"},
-    }
-
-    for _, test := range tests {
-        output := Urlize(test.input)
-        if output != test.expected {
-            t.Errorf("Expected %#v, got %#v\n", test.expected, output)
-        }
-    }
-}
--- /dev/null
+++ b/helpers/url.go
@@ -1,0 +1,110 @@
+// Copyright © 2013 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "fmt"
+ "net/url"
+ "path"
+)
+
+var _ = fmt.Println
+
+// Similar to MakePath, but with Unicode handling
+// Example:
+// uri: Vim (text editor)
+// urlize: vim-text-editor
+func Urlize(uri string) string {
+    sanitized := MakePath(uri)
+
+ // escape unicode letters
+ parsedUri, err := url.Parse(sanitized)
+    if err != nil {
+        // if net/url can not parse URL it's meaning Sanitize works incorrect
+ panic(err)
+ }
+ x := parsedUri.String()
+ return x
+}
+
+// Combines a base with a path
+// Example
+// base: http://spf13.com/
+// path: post/how-i-blog
+// result: http://spf13.com/post/how-i-blog
+func MakePermalink(host, plink string) *url.URL {
+
+    base, err := url.Parse(host)
+    if err != nil {
+        panic(err)
+    }
+
+    path, err := url.Parse(plink)
+    if err != nil {
+        panic(err)
+    }
+    return base.ResolveReference(path)
+}
+
+func UrlPrep(ugly bool, in string) string {
+    if ugly {
+        return Uglify(in)
+    } else {
+        return PrettifyUrl(in)
+    }
+}
+
+// Don't Return /index.html portion.
+func PrettifyUrl(in string) string {
+    x := PrettifyPath(in)
+
+    if path.Base(x) == "index.html" {
+        return path.Dir(x)
+    }
+
+    if in == "" {
+        return "/"
+    }
+
+    return x
+}
+
+// /section/name/index.html -> /section/name.html
+// /section/name/ -> /section/name.html
+// /section/name.html -> /section/name.html
+func Uglify(in string) string {
+    if path.Ext(in) == "" {
+        if len(in) < 2 {
+            return "/"
+        }
+        // /section/name/ -> /section/name.html
+        return path.Clean(in) + ".html"
+    } else {
+        name, ext := FileAndExt(in)
+        if name == "index" {
+            // /section/name/index.html -> /section/name.html
+            d := path.Dir(in)
+            if len(d) > 1 {
+                return d + ext
+            } else {
+                return in
+            }
+        } else {
+            // /section/name.html -> /section/name.html
+            return path.Clean(in)
+        }
+    }
+
+    return in
+}
--- a/hugolib/config.go
+++ b/hugolib/config.go
@@ -17,6 +17,7 @@
"encoding/json"
"fmt"
"github.com/BurntSushi/toml"
+ "github.com/spf13/hugo/helpers"
"io/ioutil"
"launchpad.net/goyaml"
"os"
@@ -67,7 +68,7 @@
c.readInConfig()
// set index defaults if none provided
-    if len(c.Indexes) == 0 {
+    if c.Indexes == nil {
         c.Indexes = make(map[string]string)
c.Indexes["tag"] = "tags"
c.Indexes["category"] = "categories"
@@ -169,15 +170,15 @@
 func (c *Config) findConfigFile(configFileName string) (string, error) {
     if configFileName == "" {
         // config not specified, let's search
-        if b, _ := exists(c.GetAbsPath("config.json")); b {
+        if b, _ := helpers.Exists(c.GetAbsPath("config.json")); b {
             return c.GetAbsPath("config.json"), nil
         }
-        if b, _ := exists(c.GetAbsPath("config.toml")); b {
+        if b, _ := helpers.Exists(c.GetAbsPath("config.toml")); b {
             return c.GetAbsPath("config.toml"), nil
         }
-        if b, _ := exists(c.GetAbsPath("config.yaml")); b {
+        if b, _ := helpers.Exists(c.GetAbsPath("config.yaml")); b {
            return c.GetAbsPath("config.yaml"), nil
         }
@@ -191,7 +192,7 @@
// Else check the local directory
t := c.GetAbsPath(configFileName)
-    if b, _ := exists(t); b {
+    if b, _ := helpers.Exists(t); b {
         return t, nil
     } else {
         return "", fmt.Errorf("config file not found at: %s", t)
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -76,7 +76,7 @@
 func (p *Page) Plain() string {
     if len(p.plain) == 0 {
-        p.plain = StripHTML(StripShortcodes(string(p.renderBytes(p.rawContent))))
+ p.plain = helpers.StripHTML(StripShortcodes(string(p.renderBytes(p.rawContent))))
}
return p.plain
}
@@ -147,38 +147,6 @@
return &page
}
-func StripHTML(s string) string {
-    output := ""
-
-    // Shortcut strings with no tags in them
-    if !strings.ContainsAny(s, "<>") {
-        output = s
-    } else {
-        s = strings.Replace(s, "\n", " ", -1)
-        s = strings.Replace(s, "</p>", " \n", -1)
-        s = strings.Replace(s, "<br>", " \n", -1)
-        s = strings.Replace(s, "</br>", " \n", -1)
-
-        // Walk through the string removing all tags
-        b := new(bytes.Buffer)
-        inTag := false
-        for _, r := range s {
-            switch r {
-            case '<':
-                inTag = true
-            case '>':
-                inTag = false
-            default:
-                if !inTag {
-                    b.WriteRune(r)
-                }
-            }
-        }
-        output = b.String()
-    }
-    return output
-}
-
 func (p *Page) IsRenderable() bool {
     return p.renderable
}
@@ -274,40 +242,17 @@
}
         //fmt.Printf("have an override for %q in section %s → %s\n", p.Title, p.Section, permalink)
     } else {
-
         if len(pSlug) > 0 {
-            if p.Site.Config != nil && p.Site.Config.UglyUrls {
-                filename := fmt.Sprintf("%s.%s", p.Slug, p.Extension)
-                permalink = path.Join(dir, filename)
-            } else {
-                permalink = path.Join(dir, p.Slug) + "/"
- }
+ permalink = helpers.UrlPrep(p.Site.Config.UglyUrls, path.Join(dir, p.Slug+"."+p.Extension))
         } else if len(pUrl) > 2 {
             permalink = pUrl
         } else {
             _, t := path.Split(p.FileName)
-            if p.Site.Config != nil && p.Site.Config.UglyUrls {
-                x := replaceExtension(strings.TrimSpace(t), p.Extension)
- permalink = path.Join(dir, x)
-            } else {
-                file, _ := fileExt(strings.TrimSpace(t))
- permalink = path.Join(dir, file)
- }
+ permalink = helpers.UrlPrep(p.Site.Config.UglyUrls, path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension)))
}
-
}
- base, err := url.Parse(baseUrl)
-    if err != nil {
-        return nil, err
- }
-
- path, err := url.Parse(permalink)
-    if err != nil {
-        return nil, err
- }
-
- return MakePermalink(base, path), nil
+ return helpers.MakePermalink(baseUrl, permalink), nil
}
 func (p *Page) LinkTitle() string {
@@ -696,7 +641,7 @@
     } else {
         // Fall back to filename
_, t := path.Split(p.FileName)
- outfile = replaceExtension(strings.TrimSpace(t), p.Extension)
+ outfile = helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension)
}
return path.Join(p.Dir, strings.TrimSpace(outfile))
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -17,9 +17,9 @@
     }{
         {"x/y/z/boofar.md", "x/y/z", "", "", false, "/x/y/z/boofar", "/x/y/z/boofar"},
         {"x/y/z/boofar.md", "x/y/z/", "", "", false, "/x/y/z/boofar", "/x/y/z/boofar"},
-        {"x/y/z/boofar.md", "x/y/z/", "", "boofar", false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
+        {"x/y/z/boofar.md", "x/y/z/", "", "boofar", false, "/x/y/z/boofar", "/x/y/z/boofar"},
         {"x/y/z/boofar.md", "x/y/z", "http://barnew/", "", false, "http://barnew/x/y/z/boofar", "/x/y/z/boofar"},
-        {"x/y/z/boofar.md", "x/y/z/", "http://barnew/", "boofar", false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
+        {"x/y/z/boofar.md", "x/y/z/", "http://barnew/", "boofar", false, "http://barnew/x/y/z/boofar", "/x/y/z/boofar"},
         {"x/y/z/boofar.md", "x/y/z", "", "", true, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
         {"x/y/z/boofar.md", "x/y/z/", "", "", true, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
         {"x/y/z/boofar.md", "x/y/z/", "", "boofar", true, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
--- a/hugolib/path.go
+++ /dev/null
@@ -1,34 +1,0 @@
-package hugolib
-
-import (
- "os"
- "strings"
-)
-
-func fileExt(path string) (file, ext string) {
-    if strings.Contains(path, ".") {
-        i := len(path) - 1
-        for path[i] != '.' {
-            i--
-        }
-        return path[:i], path[i+1:]
-    }
-    return path, ""
-}
-
-func replaceExtension(path string, newExt string) string {
-    f, _ := fileExt(path)
- return f + "." + newExt
-}
-
-// Check if Exists && is Directory
-func dirExists(path string) (bool, error) {
-    fi, err := os.Stat(path)
-    if err == nil && fi.IsDir() {
-        return true, nil
-    }
-    if os.IsNotExist(err) {
-        return false, nil
-    }
-    return false, err
-}
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -25,7 +25,6 @@
"github.com/spf13/nitro"
"html/template"
"io"
- "net/url"
"os"
"strings"
"time"
@@ -35,10 +34,6 @@
var DefaultTimer *nitro.B
-func MakePermalink(base *url.URL, path *url.URL) *url.URL {
-    return base.ResolveReference(path)
-}
-
// Site contains all the information relevant for constructing a static
// site. The basic flow of information is as follows:
//
@@ -228,18 +223,6 @@
}
}
-// Check if File / Directory Exists
-func exists(path string) (bool, error) {
-    _, err := os.Stat(path)
-    if err == nil {
-        return true, nil
-    }
-    if os.IsNotExist(err) {
-        return false, nil
-    }
-    return false, err
-}
-
 func (s *Site) absLayoutDir() string {
     return s.Config.GetAbsPath(s.Config.LayoutDir)
}
@@ -253,12 +236,7 @@
}
 func (s *Site) checkDirectories() (err error) {
-    /*
-        if b, _ := dirExists(s.absLayoutDir()); !b {
-            return fmt.Errorf("No layout directory found, expecting to find it at " + s.absLayoutDir())
-        }
- */
-    if b, _ := dirExists(s.absContentDir()); !b {
+    if b, _ := helpers.DirExists(s.absContentDir()); !b {
         return fmt.Errorf("No source directory found, expecting to find it at " + s.absContentDir())
     }
return
@@ -418,18 +396,12 @@
     for k, o := range s.Indexes[plural] {
         n := s.NewNode()
n.Title = strings.Title(k)
- url := helpers.Urlize(plural + "/" + k)
- n.Url = url + ".html"
- plink := n.Url
- n.Permalink = permalink(s, plink)
- n.RSSLink = permalink(s, url+".xml")
+ base := plural + "/" + k
+ s.setUrls(n, base)
n.Date = o[0].Page.Date
n.Data[singular] = o
n.Data["Pages"] = o.Pages()
layout := "indexes/" + singular + ".html"
-
- var base string
- base = plural + "/" + k
err := s.render(n, base+".html", layout)
         if err != nil {
             return err
@@ -437,8 +409,7 @@
         if a := s.Tmpl.Lookup("rss.xml"); a != nil {
             // XML Feed
- n.Url = helpers.Urlize(plural + "/" + k + ".xml")
- n.Permalink = permalink(s, n.Url)
+ s.setUrls(n, base+".xml")
err := s.render(n, base+".xml", "rss.xml")
             if err != nil {
                 return err
@@ -455,9 +426,7 @@
     for singular, plural := range s.Config.Indexes {
         n := s.NewNode()
n.Title = strings.Title(plural)
- url := helpers.Urlize(plural)
- n.Url = url + "/index.html"
- n.Permalink = permalink(s, n.Url)
+ s.setUrls(n, plural)
n.Data["Singular"] = singular
n.Data["Plural"] = plural
n.Data["Index"] = s.Indexes[plural]
@@ -477,9 +446,7 @@
     for section, data := range s.Sections {
         n := s.NewNode()
n.Title = strings.Title(inflect.Pluralize(section))
- n.Url = helpers.Urlize(section + "/" + "index.html")
- n.Permalink = permalink(s, n.Url)
- n.RSSLink = permalink(s, section+".xml")
+ s.setUrls(n, section)
n.Date = data[0].Page.Date
n.Data["Pages"] = data.Pages()
layout := "indexes/" + section + ".html"
@@ -491,8 +458,7 @@
         if a := s.Tmpl.Lookup("rss.xml"); a != nil {
             // XML Feed
- n.Url = helpers.Urlize(section + ".xml")
- n.Permalink = template.HTML(string(n.Site.BaseUrl) + n.Url)
+ s.setUrls(n, section+".xml")
err = s.render(n, section+".xml", "rss.xml")
             if err != nil {
                 return err
@@ -505,9 +471,7 @@
 func (s *Site) RenderHomePage() error {
     n := s.NewNode()
n.Title = n.Site.Title
- n.Url = helpers.Urlize(string(n.Site.BaseUrl))
- n.RSSLink = permalink(s, "index.xml")
- n.Permalink = permalink(s, "")
+ s.setUrls(n, "/")
n.Data["Pages"] = s.Pages
err := s.render(n, "/", "index.html")
     if err != nil {
@@ -518,7 +482,7 @@
// XML Feed
     n.Url = helpers.Urlize("index.xml")
     n.Title = "Recent Content"
- n.Permalink = permalink(s, "index.xml")
+    n.Permalink = s.permalink("index.xml")
     high := 50
     if len(s.Pages) < high {
         high = len(s.Pages)
@@ -536,7 +500,7 @@
     if a := s.Tmpl.Lookup("404.html"); a != nil {
         n.Url = helpers.Urlize("404.html")
         n.Title = "404 Page not found"
- n.Permalink = permalink(s, "404.html")
+        n.Permalink = s.permalink("404.html")
         return s.render(n, "404.html", "404.html")
}
@@ -550,18 +514,26 @@
}
}
-func permalink(s *Site, plink string) template.HTML {
-    base, err := url.Parse(string(s.Config.BaseUrl))
-    if err != nil {
-        panic(err)
- }
+func (s *Site) setUrls(n *Node, in string) {
+    n.Url = s.prepUrl(in)
+ n.Permalink = s.permalink(n.Url)
+ n.RSSLink = s.permalink(in + ".xml")
+}
- path, err := url.Parse(plink)
-    if err != nil {
-        panic(err)
- }
+func (s *Site) permalink(plink string) template.HTML {
+    return template.HTML(helpers.MakePermalink(string(s.Config.BaseUrl), s.prepUrl(plink)).String())
+}
- return template.HTML(MakePermalink(base, path).String())
+func (s *Site) prepUrl(in string) string {
+    return helpers.Urlize(s.PrettifyUrl(in))
+}
+
+func (s *Site) PrettifyUrl(in string) string {
+    return helpers.UrlPrep(s.Config.UglyUrls, in)
+}
+
+func (s *Site) PrettifyPath(in string) string {
+    return helpers.PathPrep(s.Config.UglyUrls, in)
}
 func (s *Site) NewNode() *Node {
--