shithub: hugo

Download patch

ref: 8b8fb417ae065954a056ea018bd092748cc9127c
parent: ca69cad8aa5d8eefbce13abd97b2b979339a458c
author: Anthony Fok <[email protected]>
date: Tue Mar 17 18:16:54 EDT 2015

More initialism corrections (golint)

Thanks to @bep's new, brilliant helpers.Deprecated() function,
the following functions or variables are transitioned to their
new names, preserving backward compatibility for v0.14
and warning the user of upcoming obsolescence in v0.15:

 * .Url → .URL (for node, menu and paginator)
 * .Site.BaseUrl → .Site.BaseURL
 * .Site.Indexes → .Site.Taxonomies
 * .Site.Recent → .Site.Pages
 * getJson → getJSON
 * getCsv → getCSV
 * safeHtml → safeHTML
 * safeCss → safeCSS
 * safeUrl → safeURL

Also fix related initialisms in strings and comments.

Continued effort in fixing #959.

--- a/commands/server.go
+++ b/commands/server.go
@@ -135,7 +135,7 @@
 	}
 }
 
-// fixUrl massages the BaseUrl into a form needed for serving
+// fixURL massages the BaseURL into a form needed for serving
 // all pages correctly.
 func fixURL(s string) (string, error) {
 	useLocalhost := false
@@ -164,7 +164,7 @@
 		if strings.Contains(host, ":") {
 			host, _, err = net.SplitHostPort(u.Host)
 			if err != nil {
-				return "", fmt.Errorf("Failed to split BaseUrl hostpost: %s", err)
+				return "", fmt.Errorf("Failed to split BaseURL hostport: %s", err)
 			}
 		}
 		u.Host = fmt.Sprintf("%s:%d", host, serverPort)
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -330,7 +330,7 @@
 	return PrettifyPath(in)
 }
 
-// Same as PrettifyUrlPath() but for file paths.
+// Same as PrettifyURLPath() but for file paths.
 //     /section/name.html       becomes /section/name/index.html
 //     /section/name/           becomes /section/name/index.html
 //     /section/name/index.html becomes /section/name/index.html
--- a/helpers/url.go
+++ b/helpers/url.go
@@ -69,7 +69,7 @@
 	// in issues #157, #622, etc., without forcing
 	// relative URLs to begin with '/'.
 	// Once the fixes are in, let's remove this kludge
-	// and restore SanitizeUrl() to the way it was.
+	// and restore SanitizeURL() to the way it was.
 	//                         -- @anthonyfok, 2015-02-16
 	//
 	// Begin temporary kludge
@@ -87,12 +87,12 @@
 
 }
 
-// SanitizeUrl sanitizes the input URL string.
+// SanitizeURL sanitizes the input URL string.
 func SanitizeURL(in string) string {
 	return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
 }
 
-// SanitizeUrlKeepTrailingSlash is the same as SanitizeUrl, but will keep any trailing slash.
+// SanitizeURLKeepTrailingSlash is the same as SanitizeURL, but will keep any trailing slash.
 func SanitizeURLKeepTrailingSlash(in string) string {
 	return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
 }
@@ -147,7 +147,7 @@
 
 // AddContextRoot adds the context root to an URL if it's not already set.
 // For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
-// relative URLs must not include the context root if canonifyUrls is enabled. But if it's disabled, it must be set.
+// relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set.
 func AddContextRoot(baseURL, relativePath string) string {
 
 	url, err := url.Parse(baseURL)
@@ -185,7 +185,7 @@
 	return url
 }
 
-// PrettifyUrl takes a URL string and returns a semantic, clean URL.
+// PrettifyURL takes a URL string and returns a semantic, clean URL.
 func PrettifyURL(in string) string {
 	x := PrettifyURLPath(in)
 
@@ -200,7 +200,7 @@
 	return x
 }
 
-// PrettifyUrlPath takes a URL path to a content and converts it
+// PrettifyURLPath takes a URL path to a content and converts it
 // to enable pretty URLs.
 //     /section/name.html       becomes /section/name/index.html
 //     /section/name/           becomes /section/name/index.html
@@ -209,7 +209,7 @@
 	return PrettiyPath(in, pathBridge)
 }
 
-// Uglify does the opposite of PrettifyUrlPath().
+// Uglify does the opposite of PrettifyURLPath().
 //     /section/name/index.html becomes /section/name.html
 //     /section/name/           becomes /section/name.html
 //     /section/name.html       becomes /section/name.html
--- a/helpers/url_test.go
+++ b/helpers/url_test.go
@@ -6,7 +6,7 @@
 	"testing"
 )
 
-func TestUrlize(t *testing.T) {
+func TestURLize(t *testing.T) {
 	tests := []struct {
 		input    string
 		expected string
@@ -26,7 +26,7 @@
 	}
 }
 
-func TestSanitizeUrl(t *testing.T) {
+func TestSanitizeURL(t *testing.T) {
 	tests := []struct {
 		input    string
 		expected string
@@ -76,7 +76,7 @@
 	}
 }
 
-func TestUrlPrep(t *testing.T) {
+func TestURLPrep(t *testing.T) {
 	type test struct {
 		ugly   bool
 		input  string
--- a/hugolib/menu.go
+++ b/hugolib/menu.go
@@ -19,10 +19,11 @@
 	"strings"
 
 	"github.com/spf13/cast"
+	"github.com/spf13/hugo/helpers"
 )
 
 type MenuEntry struct {
-	Url        string
+	URL        string
 	Name       string
 	Menu       string
 	Identifier string
@@ -37,6 +38,12 @@
 type Menus map[string]*Menu
 type PageMenus map[string]*MenuEntry
 
+// Url is deprecated. Will be removed in 0.15.
+func (me *MenuEntry) Url() string {
+	helpers.Deprecated("MenuEntry", ".Url", ".URL")
+	return me.URL
+}
+
 func (me *MenuEntry) AddChild(child *MenuEntry) {
 	me.Children = append(me.Children, child)
 	me.Children.Sort()
@@ -53,11 +60,11 @@
 	return me.Name
 }
 
-func (me *MenuEntry) hopefullyUniqueId() string {
+func (me *MenuEntry) hopefullyUniqueID() string {
 	if me.Identifier != "" {
 		return me.Identifier
-	} else if me.Url != "" {
-		return me.Url
+	} else if me.URL != "" {
+		return me.URL
 	} else {
 		return me.Name
 	}
@@ -64,11 +71,11 @@
 }
 
 func (me *MenuEntry) IsEqual(inme *MenuEntry) bool {
-	return me.hopefullyUniqueId() == inme.hopefullyUniqueId() && me.Parent == inme.Parent
+	return me.hopefullyUniqueID() == inme.hopefullyUniqueID() && me.Parent == inme.Parent
 }
 
 func (me *MenuEntry) IsSameResource(inme *MenuEntry) bool {
-	return me.Url != "" && inme.Url != "" && me.Url == inme.Url
+	return me.URL != "" && inme.URL != "" && me.URL == inme.URL
 }
 
 func (me *MenuEntry) MarshallMap(ime map[string]interface{}) {
@@ -76,7 +83,7 @@
 		loki := strings.ToLower(k)
 		switch loki {
 		case "url":
-			me.Url = cast.ToString(v)
+			me.URL = cast.ToString(v)
 		case "weight":
 			me.Weight = cast.ToInt(v)
 		case "name":
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -180,8 +180,8 @@
 	assert.NotNil(t, me1)
 	assert.NotNil(t, me2)
 
-	assert.True(t, strings.Contains(me1.Url, "doc1"))
-	assert.True(t, strings.Contains(me2.Url, "doc2"))
+	assert.True(t, strings.Contains(me1.URL, "doc1"))
+	assert.True(t, strings.Contains(me2.URL, "doc2"))
 
 }
 
@@ -216,8 +216,8 @@
 	assert.NotNil(t, me1)
 	assert.NotNil(t, me2)
 
-	assert.True(t, strings.Contains(me1.Url, "doc1"))
-	assert.True(t, strings.Contains(me2.Url, "doc2"))
+	assert.True(t, strings.Contains(me1.URL, "doc1"))
+	assert.True(t, strings.Contains(me2.URL, "doc2"))
 
 }
 
@@ -275,7 +275,7 @@
 
 	assert.NotNil(t, me)
 
-	assert.Equal(t, "/Zoo/resource/#anchor", me.Url)
+	assert.Equal(t, "/Zoo/resource/#anchor", me.URL)
 }
 
 // issue #719
@@ -309,7 +309,7 @@
 		expected = expectedBase + "/"
 	}
 
-	assert.Equal(t, expected, unicodeRussian.Url, "uglyURLs[%t]", uglyURLs)
+	assert.Equal(t, expected, unicodeRussian.URL, "uglyURLs[%t]", uglyURLs)
 }
 
 func TestTaxonomyNodeMenu(t *testing.T) {
@@ -329,7 +329,7 @@
 		{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
 			ts.findTestMenuEntryByID("tax", "2"), true, false},
 		{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
-			&MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
+			&MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
 	} {
 
 		n, _ := ts.site.newTaxonomyNode(this.taxInfo)
@@ -349,7 +349,7 @@
 
 	menuEntryXML := ts.findTestMenuEntryByID("tax", "xml")
 
-	if strings.HasSuffix(menuEntryXML.Url, "/") {
+	if strings.HasSuffix(menuEntryXML.URL, "/") {
 		t.Error("RSS menu item should not be padded with trailing slash")
 	}
 }
@@ -359,7 +359,7 @@
 	defer resetMenuTestState(ts)
 
 	home := ts.site.newHomeNode()
-	homeMenuEntry := &MenuEntry{Name: home.Title, Url: home.Url}
+	homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL}
 
 	for i, this := range []struct {
 		menu           string
@@ -369,7 +369,7 @@
 	}{
 		{"main", homeMenuEntry, true, false},
 		{"doesnotexist", homeMenuEntry, false, false},
-		{"main", &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
+		{"main", &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
 		{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandparentId"), false, false},
 		{"grandparent", ts.findTestMenuEntryByID("grandparent", "parentId"), false, true},
 		{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandchildId"), true, false},
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -17,6 +17,7 @@
 	"html/template"
 	"sync"
 	"time"
+	"github.com/spf13/hugo/helpers"
 )
 
 type Node struct {
@@ -30,7 +31,7 @@
 	Params      map[string]interface{}
 	Date        time.Time
 	Sitemap     Sitemap
-	UrlPath
+	URLPath
 	paginator     *pager
 	paginatorInit sync.Once
 	scratch       *Scratch
@@ -42,7 +43,7 @@
 
 func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
 	if inme.HasChildren() {
-		me := MenuEntry{Name: n.Title, Url: n.Url}
+		me := MenuEntry{Name: n.Title, URL: n.URL}
 
 		for _, child := range inme.Children {
 			if me.IsSameResource(child) {
@@ -56,7 +57,7 @@
 
 func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
 
-	me := MenuEntry{Name: n.Title, Url: n.Url}
+	me := MenuEntry{Name: n.Title, URL: n.URL}
 	if !me.IsSameResource(inme) {
 		return false
 	}
@@ -119,11 +120,17 @@
 	return n.Site.RelRef(ref, nil)
 }
 
-type UrlPath struct {
-	Url       string
+type URLPath struct {
+	URL       string
 	Permalink template.HTML
 	Slug      string
 	Section   string
+}
+
+// Url is deprecated. Will be removed in 0.15.
+func (n *Node) Url() string {
+	helpers.Deprecated("Node", ".Url", ".URL")
+	return n.URL
 }
 
 // Scratch returns the writable context associated with this Node.
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -341,10 +341,10 @@
 }
 
 func (p *Page) permalink() (*url.URL, error) {
-	baseURL := string(p.Site.BaseUrl)
+	baseURL := string(p.Site.BaseURL)
 	dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir()))
 	pSlug := strings.TrimSpace(p.Slug)
-	pURL := strings.TrimSpace(p.Url)
+	pURL := strings.TrimSpace(p.URL)
 	var permalink string
 	var err error
 
@@ -420,9 +420,9 @@
 	}
 
 	if viper.GetBool("CanonifyURLs") {
-		// replacements for relpermalink with baseUrl on the form http://myhost.com/sub/ will fail later on
-		// have to return the Url relative from baseUrl
-		relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseUrl))
+		// replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on
+		// have to return the URL relative from baseURL
+		relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL))
 		if err != nil {
 			return "", err
 		}
@@ -455,9 +455,9 @@
 			p.Slug = helpers.URLize(cast.ToString(v))
 		case "url":
 			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
-				return fmt.Errorf("Only relative urls are supported, %v provided", url)
+				return fmt.Errorf("Only relative URLs are supported, %v provided", url)
 			}
-			p.Url = helpers.URLize(cast.ToString(v))
+			p.URL = helpers.URLize(cast.ToString(v))
 		case "type":
 			p.contentType = cast.ToString(v)
 		case "extension", "ext":
@@ -588,7 +588,7 @@
 		if ms, ok := p.Params["menu"]; ok {
 			link, _ := p.RelPermalink()
 
-			me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, Url: link}
+			me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, URL: link}
 
 			// Could be the name of the menu to attach it to
 			mname, err := cast.ToStringE(ms)
@@ -618,7 +618,7 @@
 			}
 
 			for name, menu := range menus {
-				menuEntry := MenuEntry{Name: p.LinkTitle(), Url: link, Weight: p.Weight, Menu: name}
+				menuEntry := MenuEntry{Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
 				jww.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title)
 
 				ime, err := cast.ToStringMapE(menu)
@@ -785,9 +785,9 @@
 
 func (p *Page) TargetPath() (outfile string) {
 
-	// Always use Url if it's specified
-	if len(strings.TrimSpace(p.Url)) > 2 {
-		outfile = strings.TrimSpace(p.Url)
+	// Always use URL if it's specified
+	if len(strings.TrimSpace(p.URL)) > 2 {
+		outfile = strings.TrimSpace(p.URL)
 
 		if strings.HasSuffix(outfile, "/") {
 			outfile = outfile + "index.html"
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -35,7 +35,7 @@
 		{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
 		{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
 
-		// test url overrides
+		// test URL overrides
 		{"x/y/z/boofar.md", "x/y/z", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
 	}
 
@@ -46,12 +46,12 @@
 		viper.Set("canonifyurls", test.canonifyURLs)
 		p := &Page{
 			Node: Node{
-				UrlPath: UrlPath{
+				URLPath: URLPath{
 					Section: "z",
-					Url:     test.url,
+					URL:     test.url,
 				},
 				Site: &SiteInfo{
-					BaseUrl: test.base,
+					BaseURL: test.base,
 				},
 			},
 			Source: Source{File: *source.NewFile(filepath.FromSlash(test.file))},
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -47,11 +47,17 @@
 	return p.number
 }
 
-// Url returns the url to the current page.
-func (p *pager) Url() template.HTML {
+// URL returns the URL to the current page.
+func (p *pager) URL() template.HTML {
 	return template.HTML(p.paginationURLFactory(p.PageNumber()))
 }
 
+// Url is deprecated. Will be removed in 0.15.
+func (p *pager) Url() template.HTML {
+	helpers.Deprecated("Paginator", ".Url", ".URL")
+	return p.URL()
+}
+
 // Pages returns the elements on this page.
 func (p *pager) Pages() Pages {
 	if len(p.paginatedPages) == 0 {
@@ -142,7 +148,7 @@
 			return
 		}
 
-		pagers, err := paginatePages(n.Data["Pages"], n.Url)
+		pagers, err := paginatePages(n.Data["Pages"], n.URL)
 
 		if err != nil {
 			initError = err
@@ -184,7 +190,7 @@
 		if n.paginator != nil {
 			return
 		}
-		pagers, err := paginatePages(seq, n.Url)
+		pagers, err := paginatePages(seq, n.URL)
 
 		if err != nil {
 			initError = err
--- a/hugolib/pagination_test.go
+++ b/hugolib/pagination_test.go
@@ -43,7 +43,7 @@
 	assert.Equal(t, 5, paginator.TotalPages())
 
 	first := paginatorPages[0]
-	assert.Equal(t, "page/1/", first.Url())
+	assert.Equal(t, "page/1/", first.URL())
 	assert.Equal(t, first, first.First())
 	assert.True(t, first.HasNext())
 	assert.Equal(t, paginatorPages[1], first.Next())
@@ -58,7 +58,7 @@
 	assert.Equal(t, paginatorPages[1], third.Prev())
 
 	last := paginatorPages[4]
-	assert.Equal(t, "page/5/", last.Url())
+	assert.Equal(t, "page/5/", last.URL())
 	assert.Equal(t, last, last.Last())
 	assert.False(t, last.HasNext())
 	assert.Nil(t, last.Next())
@@ -97,7 +97,7 @@
 
 }
 
-func TestPaginationUrlFactory(t *testing.T) {
+func TestPaginationURLFactory(t *testing.T) {
 	viper.Set("PaginatePath", "zoo")
 	unicode := newPaginationURLFactory("новости проекта")
 	fooBar := newPaginationURLFactory("foo", "bar")
@@ -197,12 +197,12 @@
 	for i := 0; i < num; i++ {
 		pages[i] = &Page{
 			Node: Node{
-				UrlPath: UrlPath{
+				URLPath: URLPath{
 					Section: "z",
-					Url:     fmt.Sprintf("http://base/x/y/p%d.html", num),
+					URL:     fmt.Sprintf("http://base/x/y/p%d.html", num),
 				},
 				Site: &SiteInfo{
-					BaseUrl: "http://base/",
+					BaseURL: "http://base/",
 				},
 			},
 			Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", num)))},
--- a/hugolib/permalinks.go
+++ b/hugolib/permalinks.go
@@ -138,7 +138,7 @@
 // pageToPermalinkTitle returns the URL-safe form of the title
 func pageToPermalinkTitle(p *Page, _ string) (string, error) {
 	// Page contains Node which has Title
-	// (also contains UrlPath which has Slug, sometimes)
+	// (also contains URLPath which has Slug, sometimes)
 	return helpers.URLize(p.Title), nil
 }
 
@@ -166,7 +166,7 @@
 }
 
 func pageToPermalinkSection(p *Page, _ string) (string, error) {
-	// Page contains Node contains UrlPath which has Section
+	// Page contains Node contains URLPath which has Section
 	return p.Section(), nil
 }
 
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -93,15 +93,13 @@
 }
 
 type SiteInfo struct {
-	BaseUrl             template.URL
+	BaseURL             template.URL
 	Taxonomies          TaxonomyList
 	Authors             AuthorList
 	Social              SiteSocial
-	Indexes             *TaxonomyList // legacy, should be identical to Taxonomies
 	Sections            Taxonomy
 	Pages               *Pages
 	Files               []*source.File
-	Recent              *Pages // legacy, should be identical to Pages
 	Menus               *Menus
 	Hugo                *HugoInfo
 	Title               string
@@ -133,6 +131,24 @@
 // linkedin
 type SiteSocial map[string]string
 
+// BaseUrl is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) BaseUrl() template.URL {
+	helpers.Deprecated("Site", ".BaseUrl", ".BaseURL")
+	return s.BaseURL
+}
+
+// Recent is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Recent() *Pages {
+	helpers.Deprecated("Site", ".Recent", ".Pages")
+	return s.Pages
+}
+
+// Indexes is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Indexes() *TaxonomyList {
+	helpers.Deprecated("Site", ".Indexes", ".Taxonomies")
+	return &s.Taxonomies
+}
+
 func (s *SiteInfo) GetParam(key string) interface{} {
 	v := s.Params[strings.ToLower(key)]
 
@@ -445,7 +461,7 @@
 	}
 
 	s.Info = SiteInfo{
-		BaseUrl:         template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
+		BaseURL:         template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
 		Title:           viper.GetString("Title"),
 		Author:          viper.GetStringMap("author"),
 		LanguageCode:    viper.GetString("languagecode"),
@@ -454,7 +470,6 @@
 		BuildDrafts:     viper.GetBool("BuildDrafts"),
 		canonifyURLs:    viper.GetBool("CanonifyURLs"),
 		Pages:           &s.Pages,
-		Recent:          &s.Pages,
 		Menus:           &s.Menus,
 		Params:          params,
 		Permalinks:      permalinks,
@@ -705,14 +720,14 @@
 
 					menuEntry.MarshallMap(ime)
 
-					if strings.HasPrefix(menuEntry.Url, "/") {
+					if strings.HasPrefix(menuEntry.URL, "/") {
 						// make it match the nodes
-						menuEntryURL := menuEntry.Url
+						menuEntryURL := menuEntry.URL
 						menuEntryURL = helpers.URLizeAndPrep(menuEntryURL)
 						if !s.Info.canonifyURLs {
-							menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL)
+							menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseURL), menuEntryURL)
 						}
-						menuEntry.Url = menuEntryURL
+						menuEntry.URL = menuEntryURL
 					}
 
 					if ret[name] == nil {
@@ -764,8 +779,8 @@
 	for p, childmenu := range children {
 		_, ok := flat[twoD{p.MenuName, p.EntryName}]
 		if !ok {
-			// if parent does not exist, create one without a url
-			flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, Url: ""}
+			// if parent does not exist, create one without a URL
+			flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""}
 		}
 		flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
 	}
@@ -818,7 +833,6 @@
 	}
 
 	s.Info.Taxonomies = s.Taxonomies
-	s.Info.Indexes = &s.Taxonomies
 	s.Info.Sections = s.Sections
 }
 
@@ -1021,7 +1035,7 @@
 	base := t.plural + "/" + t.key
 	n := s.NewNode()
 	n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
-	s.setUrls(n, base)
+	s.setURLs(n, base)
 	if len(t.pages) > 0 {
 		n.Date = t.pages[0].Page.Date
 	}
@@ -1081,7 +1095,7 @@
 
 		if !viper.GetBool("DisableRSS") {
 			// XML Feed
-			n.Url = s.permalinkStr(base + "/index.xml")
+			n.URL = s.permalinkStr(base + "/index.xml")
 			n.Permalink = s.permalink(base)
 			rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
 
@@ -1099,7 +1113,7 @@
 	for singular, plural := range taxonomies {
 		n := s.NewNode()
 		n.Title = strings.Title(plural)
-		s.setUrls(n, plural)
+		s.setURLs(n, plural)
 		n.Data["Singular"] = singular
 		n.Data["Plural"] = plural
 		n.Data["Terms"] = s.Taxonomies[plural]
@@ -1125,7 +1139,7 @@
 	} else {
 		n.Title = strings.Title(section)
 	}
-	s.setUrls(n, section)
+	s.setURLs(n, section)
 	n.Date = data[0].Page.Date
 	n.Data["Pages"] = data.Pages()
 
@@ -1175,7 +1189,7 @@
 
 		if !viper.GetBool("DisableRSS") && section != "" {
 			// XML Feed
-			n.Url = s.permalinkStr(section + "/index.xml")
+			n.URL = s.permalinkStr(section + "/index.xml")
 			n.Permalink = s.permalink(section)
 			rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
 			if err := s.renderAndWriteXML("section "+section+" rss", section+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
@@ -1189,7 +1203,7 @@
 func (s *Site) newHomeNode() *Node {
 	n := s.NewNode()
 	n.Title = n.Site.Title
-	s.setUrls(n, "/")
+	s.setURLs(n, "/")
 	n.Data["Pages"] = s.Pages
 	return n
 }
@@ -1232,7 +1246,7 @@
 
 	if !viper.GetBool("DisableRSS") {
 		// XML Feed
-		n.Url = s.permalinkStr("index.xml")
+		n.URL = s.permalinkStr("index.xml")
 		n.Title = ""
 		high := 50
 		if len(s.Pages) < high {
@@ -1250,7 +1264,7 @@
 		}
 	}
 
-	n.Url = helpers.URLize("404.html")
+	n.URL = helpers.URLize("404.html")
 	n.Title = "404 Page not found"
 	n.Permalink = s.permalink("404.html")
 
@@ -1277,7 +1291,7 @@
 	page := &Page{}
 	page.Date = s.Info.LastChange
 	page.Site = &s.Info
-	page.Url = "/"
+	page.URL = "/"
 
 	pages = append(pages, page)
 	pages = append(pages, s.Pages...)
@@ -1315,9 +1329,9 @@
 	}
 }
 
-func (s *Site) setUrls(n *Node, in string) {
-	n.Url = helpers.URLizeAndPrep(in)
-	n.Permalink = s.permalink(n.Url)
+func (s *Site) setURLs(n *Node, in string) {
+	n.URL = helpers.URLizeAndPrep(in)
+	n.Permalink = s.permalink(n.URL)
 	n.RSSLink = s.permalink(in + ".xml")
 }
 
--- a/target/page_test.go
+++ b/target/page_test.go
@@ -62,7 +62,7 @@
 	}
 }
 
-func TestTranslateUglyUrls(t *testing.T) {
+func TestTranslateUglyURLs(t *testing.T) {
 	tests := []struct {
 		content  string
 		expected string
--- a/tpl/template.go
+++ b/tpl/template.go
@@ -938,7 +938,7 @@
 	return template.HTML(text)
 }
 
-// "safeHtmlAttr" is currently disabled, pending further discussion
+// "safeHTMLAttr" is currently disabled, pending further discussion
 // on its use case.  2015-01-19
 func SafeHTMLAttr(text string) template.HTMLAttr {
 	return template.HTMLAttr(text)
@@ -1308,11 +1308,8 @@
 		"isset":       IsSet,
 		"echoParam":   ReturnWhenSet,
 		"safeHTML":    SafeHTML,
-		"safeHtml":    SafeHTML,
 		"safeCSS":     SafeCSS,
-		"safeCss":     SafeCSS,
 		"safeURL":     SafeURL,
-		"safeUrl":     SafeURL,
 		"markdownify": Markdownify,
 		"first":       First,
 		"where":       Where,
@@ -1337,11 +1334,35 @@
 		"trim":        Trim,
 		"dateFormat":  DateFormat,
 		"getJSON":     GetJSON,
-		"getJson":     GetJSON,
 		"getCSV":      GetCSV,
-		"getCsv":      GetCSV,
 		"seq":         helpers.Seq,
 		"getenv":      func(varName string) string { return os.Getenv(varName) },
+
+		// "getJson" is deprecated. Will be removed in 0.15.
+		"getJson": func(urlParts ...string) interface{} {
+			helpers.Deprecated("Template", "getJson", "getJSON")
+			return GetJSON(urlParts...)
+		},
+		// "getCsv" is deprecated. Will be removed in 0.15.
+		"getCsv": func(sep string, urlParts ...string) [][]string {
+			helpers.Deprecated("Template", "getCsv", "getCSV")
+			return GetCSV(sep, urlParts...)
+		},
+		// "safeHtml" is deprecated. Will be removed in 0.15.
+		"safeHtml": func(text string) template.HTML {
+			helpers.Deprecated("Template", "safeHtml", "safeHTML")
+			return SafeHTML(text)
+		},
+		// "safeCss" is deprecated. Will be removed in 0.15.
+		"safeCss": func(text string) template.CSS {
+			helpers.Deprecated("Template", "safeCss", "safeCSS")
+			return SafeCSS(text)
+		},
+		// "safeUrl" is deprecated. Will be removed in 0.15.
+		"safeUrl": func(text string) template.URL {
+			helpers.Deprecated("Template", "safeUrl", "safeURL")
+			return SafeURL(text)
+		},
 	}
 
 }
--- a/tpl/template_embedded.go
+++ b/tpl/template_embedded.go
@@ -55,13 +55,13 @@
     <managingEditor>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</managingEditor>{{end}}{{ with .Site.Author.email }}
     <webMaster>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</webMaster>{{end}}{{ with .Site.Copyright }}
     <copyright>{{.}}</copyright>{{end}}{{ if not .Date.IsZero }}
-    <lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHtml }}</lastBuildDate>{{ end }}
-    <atom:link href="{{.Url}}" rel="self" type="application/rss+xml" />
+    <lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</lastBuildDate>{{ end }}
+    <atom:link href="{{.URL}}" rel="self" type="application/rss+xml" />
     {{ range first 15 .Data.Pages }}
     <item>
       <title>{{ .Title }}</title>
       <link>{{ .Permalink }}</link>
-      <pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHtml }}</pubDate>
+      <pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</pubDate>
       {{ with .Site.Author.email }}<author>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</author>{{end}}
       <guid>{{ .Permalink }}</guid>
       <description>{{ .Content | html }}</description>
@@ -74,7 +74,7 @@
   {{ range .Data.Pages }}
   <url>
     <loc>{{ .Permalink }}</loc>{{ if not .Date.IsZero }}
-    <lastmod>{{ safeHtml ( .Date.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }}
+    <lastmod>{{ safeHTML ( .Date.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }}
     <changefreq>{{ . }}</changefreq>{{ end }}{{ if ge .Sitemap.Priority 0.0 }}
     <priority>{{ .Sitemap.Priority }}</priority>{{ end }}
   </url>
@@ -86,24 +86,24 @@
     <ul class="pagination">
         {{ with $pag.First }}
         <li>
-            <a href="{{ .Url }}" aria-label="First"><span aria-hidden="true">&laquo;&laquo;</span></a>
+            <a href="{{ .URL }}" aria-label="First"><span aria-hidden="true">&laquo;&laquo;</span></a>
         </li>
         {{ end }}
         <li
         {{ if not $pag.HasPrev }}class="disabled"{{ end }}>
-        <a href="{{ if $pag.HasPrev }}{{ $pag.Prev.Url }}{{ end }}" aria-label="Previous"><span aria-hidden="true">&laquo;</span></a>
+        <a href="{{ if $pag.HasPrev }}{{ $pag.Prev.URL }}{{ end }}" aria-label="Previous"><span aria-hidden="true">&laquo;</span></a>
         </li>
         {{ range $pag.Pagers }}
         <li
-        {{ if eq . $pag }}class="active"{{ end }}><a href="{{ .Url }}">{{ .PageNumber }}</a></li>
+        {{ if eq . $pag }}class="active"{{ end }}><a href="{{ .URL }}">{{ .PageNumber }}</a></li>
         {{ end }}
         <li
         {{ if not $pag.HasNext }}class="disabled"{{ end }}>
-        <a href="{{ if $pag.HasNext }}{{ $pag.Next.Url }}{{ end }}" aria-label="Next"><span aria-hidden="true">&raquo;</span></a>
+        <a href="{{ if $pag.HasNext }}{{ $pag.Next.URL }}{{ end }}" aria-label="Next"><span aria-hidden="true">&raquo;</span></a>
         </li>
         {{ with $pag.Last }}
         <li>
-            <a href="{{ .Url }}" aria-label="Last"><span aria-hidden="true">&raquo;&raquo;</span></a>
+            <a href="{{ .URL }}" aria-label="Last"><span aria-hidden="true">&raquo;&raquo;</span></a>
         </li>
         {{ end }}
     </ul>
@@ -134,7 +134,7 @@
   <meta property="og:image" content="{{ . }}" />
 {{ end }}{{ end }}
 
-{{ if not .Date.IsZero }}<meta property="og:updated_time" content="{{ .Date.Format "2006-01-02T15:04:05-07:00" | safeHtml }}"/>{{ end }}{{ with .Params.audio }}
+{{ if not .Date.IsZero }}<meta property="og:updated_time" content="{{ .Date.Format "2006-01-02T15:04:05-07:00" | safeHTML }}"/>{{ end }}{{ with .Params.audio }}
 <meta property="og:audio" content="{{ . }}" />{{ end }}{{ with .Params.locale }}
 <meta property="og:locale" content="{{ . }}" />{{ end }}{{ with .Site.Params.title }}
 <meta property="og:site_name" content="{{ . }}" />{{ end }}{{ with .Params.videos }}
@@ -193,8 +193,8 @@
 <meta itemprop="description" content="{{ with .Description }}{{ . }}{{ else }}{{if .IsPage}}{{ .Summary }}{{ else }}{{ with .Site.Params.description }}{{ . }}{{ end }}{{ end }}{{ end }}">
 
 {{if .IsPage}}{{ $ISO8601 := "2006-01-02T15:04:05-07:00" }}{{ if not .PublishDate.IsZero }}
-<meta itemprop="datePublished" content="{{ .PublishDate.Format $ISO8601 | safeHtml }}" />{{ end }}
-{{ if not .Date.IsZero }}<meta itemprop="dateModified" content="{{ .Date.Format $ISO8601 | safeHtml }}" />{{ end }}
+<meta itemprop="datePublished" content="{{ .PublishDate.Format $ISO8601 | safeHTML }}" />{{ end }}
+{{ if not .Date.IsZero }}<meta itemprop="dateModified" content="{{ .Date.Format $ISO8601 | safeHTML }}" />{{ end }}
 <meta itemprop="wordCount" content="{{ .WordCount }}">
 {{ with .Params.images }}{{ range first 6 . }}
   <meta itemprop="image" content="{{ . }}">
--- a/tpl/template_resources.go
+++ b/tpl/template_resources.go
@@ -38,7 +38,7 @@
 	m map[string]*sync.Mutex
 }
 
-// resLock locks an URL during download
+// URLLock locks an URL during download
 func (l *remoteLock) URLLock(url string) {
 	l.Lock()
 	if _, ok := l.m[url]; !ok {
@@ -48,7 +48,7 @@
 	l.m[url].Lock()
 }
 
-// resUnlock unlocks an URL when the download has been finished. Use only in defer calls.
+// URLUnlock unlocks an URL when the download has been finished. Use only in defer calls.
 func (l *remoteLock) URLUnlock(url string) {
 	l.RLock()
 	defer l.RUnlock()
@@ -57,7 +57,7 @@
 	}
 }
 
-// getFileID returns the cache ID for a string
+// getCacheFileID returns the cache ID for a string
 func getCacheFileID(id string) string {
 	return viper.GetString("CacheDir") + url.QueryEscape(id)
 }
@@ -173,9 +173,9 @@
 	return resGetLocal(url, hugofs.SourceFs)
 }
 
-// GetJson expects one or n-parts of a URL to a resource which can either be a local or a remote one.
+// GetJSON expects one or n-parts of a URL to a resource which can either be a local or a remote one.
 // If you provide multiple parts they will be joined together to the final URL.
-// GetJson returns nil or parsed JSON to use in a short code.
+// GetJSON returns nil or parsed JSON to use in a short code.
 func GetJSON(urlParts ...string) interface{} {
 	url := strings.Join(urlParts, "")
 	c, err := resGetResource(url)
@@ -193,7 +193,7 @@
 	return v
 }
 
-// parseCsv parses bytes of csv data into a slice slice string or an error
+// parseCSV parses bytes of CSV data into a slice slice string or an error
 func parseCSV(c []byte, sep string) ([][]string, error) {
 	if len(sep) != 1 {
 		return nil, errors.New("Incorrect length of csv separator: " + sep)
@@ -206,11 +206,11 @@
 	return r.ReadAll()
 }
 
-// GetCsv expects a data separator and one or n-parts of a URL to a resource which
+// GetCSV expects a data separator and one or n-parts of a URL to a resource which
 // can either be a local or a remote one.
 // The data separator can be a comma, semi-colon, pipe, etc, but only one character.
 // If you provide multiple parts for the URL they will be joined together to the final URL.
-// GetCsv returns nil or a slice slice to use in a short code.
+// GetCSV returns nil or a slice of string slices to use in a short code.
 func GetCSV(sep string, urlParts ...string) [][]string {
 	url := strings.Join(urlParts, "")
 	c, err := resGetResource(url)
--- a/tpl/template_test.go
+++ b/tpl/template_test.go
@@ -999,10 +999,10 @@
 		buf.Reset()
 		err = tmpl.Execute(buf, SafeHTML(this.str))
 		if err != nil {
-			t.Errorf("[%d] execute template with an escaped string value by SafeHtml returns unexpected error: %s", i, err)
+			t.Errorf("[%d] execute template with an escaped string value by SafeHTML returns unexpected error: %s", i, err)
 		}
 		if buf.String() != this.expectWithEscape {
-			t.Errorf("[%d] execute template with an escaped string value by SafeHtml, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+			t.Errorf("[%d] execute template with an escaped string value by SafeHTML, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 		}
 	}
 }
@@ -1034,10 +1034,10 @@
 		buf.Reset()
 		err = tmpl.Execute(buf, SafeHTMLAttr(this.str))
 		if err != nil {
-			t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr returns unexpected error: %s", i, err)
+			t.Errorf("[%d] execute template with an escaped string value by SafeHTMLAttr returns unexpected error: %s", i, err)
 		}
 		if buf.String() != this.expectWithEscape {
-			t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+			t.Errorf("[%d] execute template with an escaped string value by SafeHTMLAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 		}
 	}
 }
@@ -1069,10 +1069,10 @@
 		buf.Reset()
 		err = tmpl.Execute(buf, SafeCSS(this.str))
 		if err != nil {
-			t.Errorf("[%d] execute template with an escaped string value by SafeCss returns unexpected error: %s", i, err)
+			t.Errorf("[%d] execute template with an escaped string value by SafeCSS returns unexpected error: %s", i, err)
 		}
 		if buf.String() != this.expectWithEscape {
-			t.Errorf("[%d] execute template with an escaped string value by SafeCss, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+			t.Errorf("[%d] execute template with an escaped string value by SafeCSS, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 		}
 	}
 }
@@ -1104,10 +1104,10 @@
 		buf.Reset()
 		err = tmpl.Execute(buf, SafeURL(this.str))
 		if err != nil {
-			t.Errorf("[%d] execute template with an escaped string value by SafeUrl returns unexpected error: %s", i, err)
+			t.Errorf("[%d] execute template with an escaped string value by SafeURL returns unexpected error: %s", i, err)
 		}
 		if buf.String() != this.expectWithEscape {
-			t.Errorf("[%d] execute template with an escaped string value by SafeUrl, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+			t.Errorf("[%d] execute template with an escaped string value by SafeURL, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 		}
 	}
 }
--- a/transform/absurl.go
+++ b/transform/absurl.go
@@ -7,15 +7,15 @@
 var absURLInit sync.Once
 var ar *absURLReplacer
 
-// for performance reasons, we reuse the first baseUrl given
-func initAbsurlReplacer(baseURL string) {
+// for performance reasons, we reuse the first baseURL given
+func initAbsURLReplacer(baseURL string) {
 	absURLInit.Do(func() {
-		ar = newAbsurlReplacer(baseURL)
+		ar = newAbsURLReplacer(baseURL)
 	})
 }
 
 func AbsURL(absURL string) (trs []link, err error) {
-	initAbsurlReplacer(absURL)
+	initAbsURLReplacer(absURL)
 
 	trs = append(trs, func(content []byte) []byte {
 		return ar.replaceInHTML(content)
@@ -24,7 +24,7 @@
 }
 
 func AbsURLInXML(absURL string) (trs []link, err error) {
-	initAbsurlReplacer(absURL)
+	initAbsURLReplacer(absURL)
 
 	trs = append(trs, func(content []byte) []byte {
 		return ar.replaceInXML(content)
--- a/transform/absurlreplacer.go
+++ b/transform/absurlreplacer.go
@@ -120,7 +120,7 @@
 		}
 
 		if bytes.HasPrefix(l.content[l.pos:], m.match) {
-			// check for schemaless urls
+			// check for schemaless URLs
 			posAfter := l.pos + len(m.match)
 			if int(posAfter) >= len(l.content) {
 				return
@@ -196,7 +196,7 @@
 	xmlMatchers  []absURLMatcher
 }
 
-func newAbsurlReplacer(baseURL string) *absURLReplacer {
+func newAbsURLReplacer(baseURL string) *absURLReplacer {
 	u, _ := url.Parse(baseURL)
 	base := strings.TrimRight(u.String(), "/")