ref: 67df33f50069445d53d373790e8c636e47013a72
parent: 00f07c5374d859057c6e7c4731000b5978364819
author: Anthony Fok <[email protected]>
date: Wed Mar 11 07:34:57 EDT 2015

Correct initialisms as suggested by golint

First step toward using the initialisms that golint suggests,
for example:

    Line 116: func GetHtmlRenderer should be GetHTMLRenderer

as seen on http://goreportcard.com/report/spf13/hugo

Thanks to @bep for the idea!

Note that command-line flags (cobra and pflag),
as well as struct fields like .BaseUrl and .Url
that are used in Go HTML templates, need more work
to maintain backward compatibility, and thus
are NOT yet dealt with in this commit.

First step in fixing #959.
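
For Go templates, the patch keeps backward compatibility by registering each
renamed template function under both its old and new name in the funcMap
(see the tpl/template.go hunk below, e.g. safeHtml alongside safeHTML).
A minimal, self-contained sketch of that aliasing pattern, using the standard
html/template package rather than Hugo's actual template wiring:

    // Sketch only (not Hugo's code): the renamed helper is registered under
    // both its old and new template names, so existing templates that call
    // "safeHtml" keep working while "safeHTML" becomes the preferred spelling.
    package main

    import (
    	"html/template"
    	"os"
    )

    // SafeHTML is the renamed helper (formerly SafeHtml); it marks a string
    // as already-safe HTML so the template engine does not escape it.
    func SafeHTML(text string) template.HTML {
    	return template.HTML(text)
    }

    func main() {
    	funcMap := template.FuncMap{
    		"safeHTML": SafeHTML, // new, golint-friendly name
    		"safeHtml": SafeHTML, // old name kept as an alias for backward compatibility
    	}

    	tmpl := template.Must(template.New("demo").Funcs(funcMap).Parse(
    		`old: {{ "<b>hi</b>" | safeHtml }}  new: {{ "<b>hi</b>" | safeHTML }}` + "\n"))

    	if err := tmpl.Execute(os.Stdout, nil); err != nil {
    		panic(err)
    	}
    }

Both spellings resolve to the same Go function, so existing themes keep
rendering unchanged.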

--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -56,8 +56,8 @@
 var hugoCmdV *cobra.Command
 
 //Flags that are to be added to commands.
-var BuildWatch, IgnoreCache, Draft, Future, UglyUrls, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, PluralizeListTitles, NoTimes bool
-var Source, CacheDir, Destination, Theme, BaseUrl, CfgFile, LogFile, Editor string
+var BuildWatch, IgnoreCache, Draft, Future, UglyURLs, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, PluralizeListTitles, NoTimes bool
+var Source, CacheDir, Destination, Theme, BaseURL, CfgFile, LogFile, Editor string
 
 //Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
 func Execute() {
@@ -89,8 +89,8 @@
 	HugoCmd.PersistentFlags().StringVarP(&Destination, "destination", "d", "", "filesystem path to write files to")
 	HugoCmd.PersistentFlags().StringVarP(&Theme, "theme", "t", "", "theme to use (located in /themes/THEMENAME/)")
 	HugoCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output")
-	HugoCmd.PersistentFlags().BoolVar(&UglyUrls, "uglyUrls", false, "if true, use /filename.html instead of /filename/")
-	HugoCmd.PersistentFlags().StringVarP(&BaseUrl, "baseUrl", "b", "", "hostname (and path) to the root eg. http://spf13.com/")
+	HugoCmd.PersistentFlags().BoolVar(&UglyURLs, "uglyUrls", false, "if true, use /filename.html instead of /filename/")
+	HugoCmd.PersistentFlags().StringVarP(&BaseURL, "baseUrl", "b", "", "hostname (and path) to the root eg. http://spf13.com/")
 	HugoCmd.PersistentFlags().StringVar(&CfgFile, "config", "", "config file (default is path/config.yaml|json|toml)")
 	HugoCmd.PersistentFlags().StringVar(&Editor, "editor", "", "edit new content with this editor, if provided")
 	HugoCmd.PersistentFlags().BoolVar(&Logging, "log", false, "Enable Logging")
@@ -127,10 +127,10 @@
 	viper.SetDefault("DefaultLayout", "post")
 	viper.SetDefault("BuildDrafts", false)
 	viper.SetDefault("BuildFuture", false)
-	viper.SetDefault("UglyUrls", false)
+	viper.SetDefault("UglyURLs", false)
 	viper.SetDefault("Verbose", false)
 	viper.SetDefault("IgnoreCache", false)
-	viper.SetDefault("CanonifyUrls", false)
+	viper.SetDefault("CanonifyURLs", false)
 	viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"})
 	viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0))
 	viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1})
@@ -155,7 +155,7 @@
 	}
 
 	if hugoCmdV.PersistentFlags().Lookup("uglyUrls").Changed {
-		viper.Set("UglyUrls", UglyUrls)
+		viper.Set("UglyURLs", UglyURLs)
 	}
 
 	if hugoCmdV.PersistentFlags().Lookup("disableRSS").Changed {
@@ -181,14 +181,14 @@
 	if hugoCmdV.PersistentFlags().Lookup("logFile").Changed {
 		viper.Set("LogFile", LogFile)
 	}
-	if BaseUrl != "" {
-		if !strings.HasSuffix(BaseUrl, "/") {
-			BaseUrl = BaseUrl + "/"
+	if BaseURL != "" {
+		if !strings.HasSuffix(BaseURL, "/") {
+			BaseURL = BaseURL + "/"
 		}
-		viper.Set("BaseUrl", BaseUrl)
+		viper.Set("BaseURL", BaseURL)
 	}
 
-	if viper.GetString("BaseUrl") == "" {
+	if viper.GetString("BaseURL") == "" {
 		jww.ERROR.Println("No 'baseurl' set in configuration or as a flag. Features like page menus will not work without one.")
 	}
 
--- a/commands/server.go
+++ b/commands/server.go
@@ -84,11 +84,11 @@
 
 	viper.Set("port", serverPort)
 
-	BaseUrl, err := fixUrl(BaseUrl)
+	BaseURL, err := fixURL(BaseURL)
 	if err != nil {
 		jww.ERROR.Fatal(err)
 	}
-	viper.Set("BaseUrl", BaseUrl)
+	viper.Set("BaseURL", BaseURL)
 
 	if err := memStats(); err != nil {
 		jww.ERROR.Println("memstats error:", err)
@@ -114,9 +114,9 @@
 	httpFs := &afero.HttpFs{SourceFs: hugofs.DestinationFS}
 	fileserver := http.FileServer(httpFs.Dir(helpers.AbsPathify(viper.GetString("PublishDir"))))
 
-	u, err := url.Parse(viper.GetString("BaseUrl"))
+	u, err := url.Parse(viper.GetString("BaseURL"))
 	if err != nil {
-		jww.ERROR.Fatalf("Invalid BaseUrl: %s", err)
+		jww.ERROR.Fatalf("Invalid BaseURL: %s", err)
 	}
 	if u.Path == "" || u.Path == "/" {
 		http.Handle("/", fileserver)
@@ -137,10 +137,10 @@
 
 // fixUrl massages the BaseUrl into a form needed for serving
 // all pages correctly.
-func fixUrl(s string) (string, error) {
+func fixURL(s string) (string, error) {
 	useLocalhost := false
 	if s == "" {
-		s = viper.GetString("BaseUrl")
+		s = viper.GetString("BaseURL")
 		useLocalhost = true
 	}
 	if !strings.HasPrefix(s, "http://") && !strings.HasPrefix(s, "https://") {
--- a/commands/server_test.go
+++ b/commands/server_test.go
@@ -6,11 +6,11 @@
 	"github.com/spf13/viper"
 )
 
-func TestFixUrl(t *testing.T) {
+func TestFixURL(t *testing.T) {
 	type data struct {
 		TestName   string
-		CliBaseUrl string
-		CfgBaseUrl string
+		CLIBaseURL string
+		CfgBaseURL string
 		AppendPort bool
 		Port       int
 		Result     string
@@ -28,11 +28,11 @@
 	}
 
 	for i, test := range tests {
-		BaseUrl = test.CliBaseUrl
-		viper.Set("BaseUrl", test.CfgBaseUrl)
+		BaseURL = test.CLIBaseURL
+		viper.Set("BaseURL", test.CfgBaseURL)
 		serverAppend = test.AppendPort
 		serverPort = test.Port
-		result, err := fixUrl(BaseUrl)
+		result, err := fixURL(BaseURL)
 		if err != nil {
 			t.Errorf("Test #%d %s: unexpected error %s", i, test.TestName, err)
 		}
--- a/helpers/content.go
+++ b/helpers/content.go
@@ -41,7 +41,7 @@
 type Blackfriday struct {
 	AngledQuotes   bool
 	Fractions      bool
-	PlainIdAnchors bool
+	PlainIDAnchors bool
 	Extensions     []string
 }
 
@@ -50,7 +50,7 @@
 	return &Blackfriday{
 		AngledQuotes:   false,
 		Fractions:      true,
-		PlainIdAnchors: false,
+		PlainIDAnchors: false,
 	}
 }
 
@@ -113,17 +113,17 @@
 }
 
 // GetHtmlRenderer creates a new Renderer with the given configuration.
-func GetHtmlRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Renderer {
+func GetHTMLRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Renderer {
 	renderParameters := blackfriday.HtmlRendererParameters{
 		FootnoteAnchorPrefix:       viper.GetString("FootnoteAnchorPrefix"),
 		FootnoteReturnLinkContents: viper.GetString("FootnoteReturnLinkContents"),
 	}
 
-	b := len(ctx.DocumentId) != 0
+	b := len(ctx.DocumentID) != 0
 
-	if b && !ctx.getConfig().PlainIdAnchors {
-		renderParameters.FootnoteAnchorPrefix = ctx.DocumentId + ":" + renderParameters.FootnoteAnchorPrefix
-		renderParameters.HeaderIDSuffix = ":" + ctx.DocumentId
+	if b && !ctx.getConfig().PlainIDAnchors {
+		renderParameters.FootnoteAnchorPrefix = ctx.DocumentID + ":" + renderParameters.FootnoteAnchorPrefix
+		renderParameters.HeaderIDSuffix = ":" + ctx.DocumentID
 	}
 
 	htmlFlags := defaultFlags
@@ -158,13 +158,13 @@
 }
 
 func markdownRender(ctx *RenderingContext) []byte {
-	return blackfriday.Markdown(ctx.Content, GetHtmlRenderer(0, ctx),
+	return blackfriday.Markdown(ctx.Content, GetHTMLRenderer(0, ctx),
 		getMarkdownExtensions(ctx))
 }
 
 func markdownRenderWithTOC(ctx *RenderingContext) []byte {
 	return blackfriday.Markdown(ctx.Content,
-		GetHtmlRenderer(blackfriday.HTML_TOC, ctx),
+		GetHTMLRenderer(blackfriday.HTML_TOC, ctx),
 		getMarkdownExtensions(ctx))
 }
 
@@ -209,7 +209,7 @@
 type RenderingContext struct {
 	Content    []byte
 	PageFmt    string
-	DocumentId string
+	DocumentID string
 	Config     *Blackfriday
 	configInit sync.Once
 }
--- a/helpers/content_test.go
+++ b/helpers/content_test.go
@@ -7,7 +7,7 @@
 	"testing"
 )
 
-const tstHtmlContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"
+const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"
 
 func TestStripHTML(t *testing.T) {
 	type test struct {
@@ -31,7 +31,7 @@
 func BenchmarkStripHTML(b *testing.B) {
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		StripHTML(tstHtmlContent)
+		StripHTML(tstHTMLContent)
 	}
 }
 
--- a/helpers/url.go
+++ b/helpers/url.go
@@ -52,7 +52,7 @@
 
 var pathBridge PathBridge
 
-func sanitizeUrlWithFlags(in string, f purell.NormalizationFlags) string {
+func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string {
 	s, err := purell.NormalizeURLString(in, f)
 	if err != nil {
 		return in
@@ -88,13 +88,13 @@
 }
 
 // SanitizeUrl sanitizes the input URL string.
-func SanitizeUrl(in string) string {
-	return sanitizeUrlWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
+func SanitizeURL(in string) string {
+	return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
 }
 
 // SanitizeUrlKeepTrailingSlash is the same as SanitizeUrl, but will keep any trailing slash.
-func SanitizeUrlKeepTrailingSlash(in string) string {
-	return sanitizeUrlWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
+func SanitizeURLKeepTrailingSlash(in string) string {
+	return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
 }
 
 // Similar to MakePath, but with Unicode handling
@@ -101,7 +101,7 @@
 // Example:
 //     uri: Vim (text editor)
 //     urlize: vim-text-editor
-func Urlize(uri string) string {
+func URLize(uri string) string {
 	sanitized := MakePathToLower(uri)
 
 	// escape unicode letters
@@ -148,9 +148,9 @@
 // AddContextRoot adds the context root to an URL if it's not already set.
 // For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
 // relative URLs must not include the context root if canonifyUrls is enabled. But if it's disabled, it must be set.
-func AddContextRoot(baseUrl, relativePath string) string {
+func AddContextRoot(baseURL, relativePath string) string {
 
-	url, err := url.Parse(baseUrl)
+	url, err := url.Parse(baseURL)
 	if err != nil {
 		panic(err)
 	}
@@ -164,16 +164,16 @@
 	return newPath
 }
 
-func UrlizeAndPrep(in string) string {
-	return UrlPrep(viper.GetBool("UglyUrls"), Urlize(in))
+func URLizeAndPrep(in string) string {
+	return URLPrep(viper.GetBool("UglyURLs"), URLize(in))
 }
 
-func UrlPrep(ugly bool, in string) string {
+func URLPrep(ugly bool, in string) string {
 	if ugly {
-		x := Uglify(SanitizeUrl(in))
+		x := Uglify(SanitizeURL(in))
 		return x
 	}
-	x := PrettifyUrl(SanitizeUrl(in))
+	x := PrettifyURL(SanitizeURL(in))
 	if path.Ext(x) == ".xml" {
 		return x
 	}
@@ -186,8 +186,8 @@
 }
 
 // PrettifyUrl takes a URL string and returns a semantic, clean URL.
-func PrettifyUrl(in string) string {
-	x := PrettifyUrlPath(in)
+func PrettifyURL(in string) string {
+	x := PrettifyURLPath(in)
 
 	if path.Base(x) == "index.html" {
 		return path.Dir(x)
@@ -205,7 +205,7 @@
 //     /section/name.html       becomes /section/name/index.html
 //     /section/name/           becomes /section/name/index.html
 //     /section/name/index.html becomes /section/name/index.html
-func PrettifyUrlPath(in string) string {
+func PrettifyURLPath(in string) string {
 	return PrettiyPath(in, pathBridge)
 }
 
--- a/helpers/url_test.go
+++ b/helpers/url_test.go
@@ -19,7 +19,7 @@
 	}
 
 	for _, test := range tests {
-		output := Urlize(test.input)
+		output := URLize(test.input)
 		if output != test.expected {
 			t.Errorf("Expected %#v, got %#v\n", test.expected, output)
 		}
@@ -36,8 +36,8 @@
 	}
 
 	for _, test := range tests {
-		o1 := SanitizeUrl(test.input)
-		o2 := SanitizeUrlKeepTrailingSlash(test.input)
+		o1 := SanitizeURL(test.input)
+		o2 := SanitizeURLKeepTrailingSlash(test.input)
 
 		expected2 := test.expected
 
@@ -88,7 +88,7 @@
 		{true, "/section/name/index.html", "/section/name.html"},
 	}
 	for i, d := range data {
-		output := UrlPrep(d.ugly, d.input)
+		output := URLPrep(d.ugly, d.input)
 		if d.output != output {
 			t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
 		}
@@ -98,7 +98,7 @@
 
 func TestAddContextRoot(t *testing.T) {
 	tests := []struct {
-		baseUrl  string
+		baseURL  string
 		url      string
 		expected string
 	}{
@@ -114,7 +114,7 @@
 	}
 
 	for _, test := range tests {
-		output := AddContextRoot(test.baseUrl, test.url)
+		output := AddContextRoot(test.baseURL, test.url)
 		if output != test.expected {
 			t.Errorf("Expected %#v, got %#v\n", test.expected, output)
 		}
@@ -122,22 +122,22 @@
 }
 
 func TestPretty(t *testing.T) {
-	assert.Equal(t, PrettifyUrlPath("/section/name.html"), "/section/name/index.html")
-	assert.Equal(t, PrettifyUrlPath("/section/sub/name.html"), "/section/sub/name/index.html")
-	assert.Equal(t, PrettifyUrlPath("/section/name/"), "/section/name/index.html")
-	assert.Equal(t, PrettifyUrlPath("/section/name/index.html"), "/section/name/index.html")
-	assert.Equal(t, PrettifyUrlPath("/index.html"), "/index.html")
-	assert.Equal(t, PrettifyUrlPath("/name.xml"), "/name/index.xml")
-	assert.Equal(t, PrettifyUrlPath("/"), "/")
-	assert.Equal(t, PrettifyUrlPath(""), "/")
-	assert.Equal(t, PrettifyUrl("/section/name.html"), "/section/name")
-	assert.Equal(t, PrettifyUrl("/section/sub/name.html"), "/section/sub/name")
-	assert.Equal(t, PrettifyUrl("/section/name/"), "/section/name")
-	assert.Equal(t, PrettifyUrl("/section/name/index.html"), "/section/name")
-	assert.Equal(t, PrettifyUrl("/index.html"), "/")
-	assert.Equal(t, PrettifyUrl("/name.xml"), "/name/index.xml")
-	assert.Equal(t, PrettifyUrl("/"), "/")
-	assert.Equal(t, PrettifyUrl(""), "/")
+	assert.Equal(t, PrettifyURLPath("/section/name.html"), "/section/name/index.html")
+	assert.Equal(t, PrettifyURLPath("/section/sub/name.html"), "/section/sub/name/index.html")
+	assert.Equal(t, PrettifyURLPath("/section/name/"), "/section/name/index.html")
+	assert.Equal(t, PrettifyURLPath("/section/name/index.html"), "/section/name/index.html")
+	assert.Equal(t, PrettifyURLPath("/index.html"), "/index.html")
+	assert.Equal(t, PrettifyURLPath("/name.xml"), "/name/index.xml")
+	assert.Equal(t, PrettifyURLPath("/"), "/")
+	assert.Equal(t, PrettifyURLPath(""), "/")
+	assert.Equal(t, PrettifyURL("/section/name.html"), "/section/name")
+	assert.Equal(t, PrettifyURL("/section/sub/name.html"), "/section/sub/name")
+	assert.Equal(t, PrettifyURL("/section/name/"), "/section/name")
+	assert.Equal(t, PrettifyURL("/section/name/index.html"), "/section/name")
+	assert.Equal(t, PrettifyURL("/index.html"), "/")
+	assert.Equal(t, PrettifyURL("/name.xml"), "/name/index.xml")
+	assert.Equal(t, PrettifyURL("/"), "/")
+	assert.Equal(t, PrettifyURL(""), "/")
 }
 
 func TestUgly(t *testing.T) {
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -95,7 +95,7 @@
 	{"sect/doc3.md", MENU_PAGE_3},
 }
 
-func tstCreateMenuPageWithNameToml(title, menu, name string) []byte {
+func tstCreateMenuPageWithNameTOML(title, menu, name string) []byte {
 	return []byte(fmt.Sprintf(`+++
 title = "%s"
 weight = 1
@@ -106,7 +106,7 @@
 Front Matter with Menu with Name`, title, menu, name))
 }
 
-func tstCreateMenuPageWithIdentifierToml(title, menu, identifier string) []byte {
+func tstCreateMenuPageWithIdentifierTOML(title, menu, identifier string) []byte {
 	return []byte(fmt.Sprintf(`+++
 title = "%s"
 weight = 1
@@ -118,7 +118,7 @@
 Front Matter with Menu with Identifier`, title, menu, identifier))
 }
 
-func tstCreateMenuPageWithNameYaml(title, menu, name string) []byte {
+func tstCreateMenuPageWithNameYAML(title, menu, name string) []byte {
 	return []byte(fmt.Sprintf(`---
 title: "%s"
 weight: 1
@@ -129,7 +129,7 @@
 Front Matter with Menu with Name`, title, menu, name))
 }
 
-func tstCreateMenuPageWithIdentifierYaml(title, menu, identifier string) []byte {
+func tstCreateMenuPageWithIdentifierYAML(title, menu, identifier string) []byte {
 	return []byte(fmt.Sprintf(`---
 title: "%s"
 weight: 1
@@ -144,7 +144,7 @@
 type testMenuState struct {
 	site       *Site
 	oldMenu    interface{}
-	oldBaseUrl interface{}
+	oldBaseURL interface{}
 }
 
 // Issue 817 - identifier should trump everything
@@ -151,15 +151,15 @@
 func TestPageMenuWithIdentifier(t *testing.T) {
 
 	toml := []source.ByteSource{
-		{"sect/doc1.md", tstCreateMenuPageWithIdentifierToml("t1", "m1", "i1")},
-		{"sect/doc2.md", tstCreateMenuPageWithIdentifierToml("t1", "m1", "i2")},
-		{"sect/doc3.md", tstCreateMenuPageWithIdentifierToml("t1", "m1", "i2")}, // duplicate
+		{"sect/doc1.md", tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i1")},
+		{"sect/doc2.md", tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")},
+		{"sect/doc3.md", tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")}, // duplicate
 	}
 
 	yaml := []source.ByteSource{
-		{"sect/doc1.md", tstCreateMenuPageWithIdentifierYaml("t1", "m1", "i1")},
-		{"sect/doc2.md", tstCreateMenuPageWithIdentifierYaml("t1", "m1", "i2")},
-		{"sect/doc3.md", tstCreateMenuPageWithIdentifierYaml("t1", "m1", "i2")}, // duplicate
+		{"sect/doc1.md", tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i1")},
+		{"sect/doc2.md", tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")},
+		{"sect/doc3.md", tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")}, // duplicate
 	}
 
 	doTestPageMenuWithIdentifier(t, toml)
@@ -174,8 +174,8 @@
 
 	assert.Equal(t, 3, len(ts.site.Pages), "Not enough pages")
 
-	me1 := ts.findTestMenuEntryById("m1", "i1")
-	me2 := ts.findTestMenuEntryById("m1", "i2")
+	me1 := ts.findTestMenuEntryByID("m1", "i1")
+	me2 := ts.findTestMenuEntryByID("m1", "i2")
 
 	assert.NotNil(t, me1)
 	assert.NotNil(t, me2)
@@ -188,15 +188,15 @@
 // Issue 817 contd - name should be second identifier in
 func TestPageMenuWithDuplicateName(t *testing.T) {
 	toml := []source.ByteSource{
-		{"sect/doc1.md", tstCreateMenuPageWithNameToml("t1", "m1", "n1")},
-		{"sect/doc2.md", tstCreateMenuPageWithNameToml("t1", "m1", "n2")},
-		{"sect/doc3.md", tstCreateMenuPageWithNameToml("t1", "m1", "n2")}, // duplicate
+		{"sect/doc1.md", tstCreateMenuPageWithNameTOML("t1", "m1", "n1")},
+		{"sect/doc2.md", tstCreateMenuPageWithNameTOML("t1", "m1", "n2")},
+		{"sect/doc3.md", tstCreateMenuPageWithNameTOML("t1", "m1", "n2")}, // duplicate
 	}
 
 	yaml := []source.ByteSource{
-		{"sect/doc1.md", tstCreateMenuPageWithNameYaml("t1", "m1", "n1")},
-		{"sect/doc2.md", tstCreateMenuPageWithNameYaml("t1", "m1", "n2")},
-		{"sect/doc3.md", tstCreateMenuPageWithNameYaml("t1", "m1", "n2")}, // duplicate
+		{"sect/doc1.md", tstCreateMenuPageWithNameYAML("t1", "m1", "n1")},
+		{"sect/doc2.md", tstCreateMenuPageWithNameYAML("t1", "m1", "n2")},
+		{"sect/doc3.md", tstCreateMenuPageWithNameYAML("t1", "m1", "n2")}, // duplicate
 	}
 
 	doTestPageMenuWithDuplicateName(t, toml)
@@ -234,7 +234,7 @@
 	third := ts.site.Pages[2]
 
 	pOne := ts.findTestMenuEntryByName("p_one", "One")
-	pTwo := ts.findTestMenuEntryById("p_two", "Two")
+	pTwo := ts.findTestMenuEntryByID("p_two", "Two")
 
 	for i, this := range []struct {
 		menu           string
@@ -267,11 +267,11 @@
 }
 
 // issue #888
-func TestMenuWithHashInUrl(t *testing.T) {
+func TestMenuWithHashInURL(t *testing.T) {
 	ts := setupMenuTests(t, MENU_PAGE_SOURCES)
 	defer resetMenuTestState(ts)
 
-	me := ts.findTestMenuEntryById("hash", "hash")
+	me := ts.findTestMenuEntryByID("hash", "hash")
 
 	assert.NotNil(t, me)
 
@@ -279,41 +279,41 @@
 }
 
 // issue #719
-func TestMenuWithUnicodeUrls(t *testing.T) {
-	for _, uglyUrls := range []bool{true, false} {
-		for _, canonifyUrls := range []bool{true, false} {
-			doTestMenuWithUnicodeUrls(t, canonifyUrls, uglyUrls)
+func TestMenuWithUnicodeURLs(t *testing.T) {
+	for _, uglyURLs := range []bool{true, false} {
+		for _, canonifyURLs := range []bool{true, false} {
+			doTestMenuWithUnicodeURLs(t, canonifyURLs, uglyURLs)
 		}
 	}
 }
 
-func doTestMenuWithUnicodeUrls(t *testing.T, canonifyUrls, uglyUrls bool) {
-	viper.Set("CanonifyUrls", canonifyUrls)
-	viper.Set("UglyUrls", uglyUrls)
+func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs, uglyURLs bool) {
+	viper.Set("CanonifyURLs", canonifyURLs)
+	viper.Set("UglyURLs", uglyURLs)
 
 	ts := setupMenuTests(t, MENU_PAGE_SOURCES)
 	defer resetMenuTestState(ts)
 
-	unicodeRussian := ts.findTestMenuEntryById("unicode", "unicode-russian")
+	unicodeRussian := ts.findTestMenuEntryByID("unicode", "unicode-russian")
 
 	expectedBase := "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0"
 
-	if !canonifyUrls {
+	if !canonifyURLs {
 		expectedBase = "/Zoo" + expectedBase
 	}
 
 	var expected string
-	if uglyUrls {
+	if uglyURLs {
 		expected = expectedBase + ".html"
 	} else {
 		expected = expectedBase + "/"
 	}
 
-	assert.Equal(t, expected, unicodeRussian.Url, "uglyUrls[%t]", uglyUrls)
+	assert.Equal(t, expected, unicodeRussian.Url, "uglyURLs[%t]", uglyURLs)
 }
 
 func TestTaxonomyNodeMenu(t *testing.T) {
-	viper.Set("CanonifyUrls", true)
+	viper.Set("CanonifyURLs", true)
 	ts := setupMenuTests(t, MENU_PAGE_SOURCES)
 	defer resetMenuTestState(ts)
 
@@ -325,9 +325,9 @@
 		hasMenuCurrent bool
 	}{
 		{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
-			ts.findTestMenuEntryById("tax", "1"), true, false},
+			ts.findTestMenuEntryByID("tax", "1"), true, false},
 		{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
-			ts.findTestMenuEntryById("tax", "2"), true, false},
+			ts.findTestMenuEntryByID("tax", "2"), true, false},
 		{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
 			&MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
 	} {
@@ -347,9 +347,9 @@
 
 	}
 
-	menuEntryXml := ts.findTestMenuEntryById("tax", "xml")
+	menuEntryXML := ts.findTestMenuEntryByID("tax", "xml")
 
-	if strings.HasSuffix(menuEntryXml.Url, "/") {
+	if strings.HasSuffix(menuEntryXML.Url, "/") {
 		t.Error("RSS menu item should not be padded with trailing slash")
 	}
 }
@@ -370,9 +370,9 @@
 		{"main", homeMenuEntry, true, false},
 		{"doesnotexist", homeMenuEntry, false, false},
 		{"main", &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
-		{"grandparent", ts.findTestMenuEntryById("grandparent", "grandparentId"), false, false},
-		{"grandparent", ts.findTestMenuEntryById("grandparent", "parentId"), false, true},
-		{"grandparent", ts.findTestMenuEntryById("grandparent", "grandchildId"), true, false},
+		{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandparentId"), false, false},
+		{"grandparent", ts.findTestMenuEntryByID("grandparent", "parentId"), false, true},
+		{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandchildId"), true, false},
 	} {
 
 		isMenuCurrent := home.IsMenuCurrent(this.menu, this.menuItem)
@@ -391,7 +391,7 @@
 var testMenuIdentityMatcher = func(me *MenuEntry, id string) bool { return me.Identifier == id }
 var testMenuNameMatcher = func(me *MenuEntry, id string) bool { return me.Name == id }
 
-func (ts testMenuState) findTestMenuEntryById(mn string, id string) *MenuEntry {
+func (ts testMenuState) findTestMenuEntryByID(mn string, id string) *MenuEntry {
 	return ts.findTestMenuEntry(mn, id, testMenuIdentityMatcher)
 }
 func (ts testMenuState) findTestMenuEntryByName(mn string, id string) *MenuEntry {
@@ -447,7 +447,7 @@
 }
 
 func getTestMenuState(s *Site, t *testing.T) *testMenuState {
-	menuState := &testMenuState{site: s, oldBaseUrl: viper.Get("baseurl"), oldMenu: viper.Get("menu")}
+	menuState := &testMenuState{site: s, oldBaseURL: viper.Get("baseurl"), oldMenu: viper.Get("menu")}
 
 	menus, err := tomlToMap(CONF_MENU1)
 
@@ -471,7 +471,7 @@
 
 func resetMenuTestState(state *testMenuState) {
 	viper.Set("menu", state.oldMenu)
-	viper.Set("baseurl", state.oldBaseUrl)
+	viper.Set("baseurl", state.oldBaseURL)
 }
 
 func createTestSite(pageSources []source.ByteSource) *Site {
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -40,7 +40,7 @@
 	return time.Now()
 }
 
-func (n *Node) HasMenuCurrent(menuId string, inme *MenuEntry) bool {
+func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
 	if inme.HasChildren() {
 		me := MenuEntry{Name: n.Title, Url: n.Url}
 
@@ -54,7 +54,7 @@
 	return false
 }
 
-func (n *Node) IsMenuCurrent(menuId string, inme *MenuEntry) bool {
+func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
 
 	me := MenuEntry{Name: n.Title, Url: n.Url}
 	if !me.IsSameResource(inme) {
@@ -63,7 +63,7 @@
 
 	// this resource may be included in several menus
 	// search for it to make sure that it is in the menu with the given menuId
-	if menu, ok := (*n.Site.Menus)[menuId]; ok {
+	if menu, ok := (*n.Site.Menus)[menuID]; ok {
 		for _, menuEntry := range *menu {
 			if menuEntry.IsSameResource(inme) {
 				return true
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -149,8 +149,8 @@
 	return al
 }
 
-func (p *Page) UniqueId() string {
-	return p.Source.UniqueId()
+func (p *Page) UniqueID() string {
+	return p.Source.UniqueID()
 }
 
 func (p *Page) Ref(ref string) (string, error) {
@@ -200,12 +200,12 @@
 func (p *Page) renderBytes(content []byte) []byte {
 	return helpers.RenderBytes(
 		&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
-			DocumentId: p.UniqueId(), Config: p.getRenderingConfig()})
+			DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
 }
 
 func (p *Page) renderContent(content []byte) []byte {
 	return helpers.RenderBytesWithTOC(&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
-		DocumentId: p.UniqueId(), Config: p.getRenderingConfig()})
+		DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
 }
 
 func (p *Page) getRenderingConfig() *helpers.Blackfriday {
@@ -341,15 +341,15 @@
 }
 
 func (p *Page) permalink() (*url.URL, error) {
-	baseUrl := string(p.Site.BaseUrl)
+	baseURL := string(p.Site.BaseUrl)
 	dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir()))
 	pSlug := strings.TrimSpace(p.Slug)
-	pUrl := strings.TrimSpace(p.Url)
+	pURL := strings.TrimSpace(p.Url)
 	var permalink string
 	var err error
 
-	if len(pUrl) > 0 {
-		return helpers.MakePermalink(baseUrl, pUrl), nil
+	if len(pURL) > 0 {
+		return helpers.MakePermalink(baseURL, pURL), nil
 	}
 
 	if override, ok := p.Site.Permalinks[p.Section()]; ok {
@@ -361,14 +361,14 @@
 		// fmt.Printf("have a section override for %q in section %s → %s\n", p.Title, p.Section, permalink)
 	} else {
 		if len(pSlug) > 0 {
-			permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, p.Slug+"."+p.Extension()))
+			permalink = helpers.URLPrep(viper.GetBool("UglyURLs"), path.Join(dir, p.Slug+"."+p.Extension()))
 		} else {
 			_, t := filepath.Split(p.Source.LogicalName())
-			permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension())))
+			permalink = helpers.URLPrep(viper.GetBool("UglyURLs"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension())))
 		}
 	}
 
-	return helpers.MakePermalink(baseUrl, permalink), nil
+	return helpers.MakePermalink(baseURL, permalink), nil
 }
 
 func (p *Page) Extension() string {
@@ -419,7 +419,7 @@
 		return "", err
 	}
 
-	if viper.GetBool("CanonifyUrls") {
+	if viper.GetBool("CanonifyURLs") {
 		// replacements for relpermalink with baseUrl on the form http://myhost.com/sub/ will fail later on
 		// have to return the Url relative from baseUrl
 		relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseUrl))
@@ -452,12 +452,12 @@
 		case "description":
 			p.Description = cast.ToString(v)
 		case "slug":
-			p.Slug = helpers.Urlize(cast.ToString(v))
+			p.Slug = helpers.URLize(cast.ToString(v))
 		case "url":
 			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
 				return fmt.Errorf("Only relative urls are supported, %v provided", url)
 			}
-			p.Url = helpers.Urlize(cast.ToString(v))
+			p.Url = helpers.URLize(cast.ToString(v))
 		case "type":
 			p.contentType = cast.ToString(v)
 		case "extension", "ext":
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -16,8 +16,8 @@
 		base         template.URL
 		slug         string
 		url          string
-		uglyUrls     bool
-		canonifyUrls bool
+		uglyURLs     bool
+		canonifyURLs bool
 		expectedAbs  string
 		expectedRel  string
 	}{
@@ -42,8 +42,8 @@
 	viper.Set("DefaultExtension", "html")
 
 	for i, test := range tests {
-		viper.Set("uglyurls", test.uglyUrls)
-		viper.Set("canonifyurls", test.canonifyUrls)
+		viper.Set("uglyurls", test.uglyURLs)
+		viper.Set("canonifyurls", test.canonifyURLs)
 		p := &Page{
 			Node: Node{
 				UrlPath: UrlPath{
--- a/hugolib/pagesPrevNext.go
+++ b/hugolib/pagesPrevNext.go
@@ -15,7 +15,7 @@
 
 func (p Pages) Prev(cur *Page) *Page {
 	for x, c := range p {
-		if c.UniqueId() == cur.UniqueId() {
+		if c.UniqueID() == cur.UniqueID() {
 			if x == 0 {
 				return p[len(p)-1]
 			}
@@ -27,7 +27,7 @@
 
 func (p Pages) Next(cur *Page) *Page {
 	for x, c := range p {
-		if c.UniqueId() == cur.UniqueId() {
+		if c.UniqueID() == cur.UniqueID() {
 			if x < len(p)-1 {
 				return p[x+1]
 			}
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -35,12 +35,12 @@
 type paginator struct {
 	paginatedPages []Pages
 	pagers
-	paginationUrlFactory
+	paginationURLFactory
 	total int
 	size  int
 }
 
-type paginationUrlFactory func(int) string
+type paginationURLFactory func(int) string
 
 // PageNumber returns the current page's number in the pager sequence.
 func (p *pager) PageNumber() int {
@@ -49,7 +49,7 @@
 
 // Url returns the url to the current page.
 func (p *pager) Url() template.HTML {
-	return template.HTML(p.paginationUrlFactory(p.PageNumber()))
+	return template.HTML(p.paginationURLFactory(p.PageNumber()))
 }
 
 // Pages returns the elements on this page.
@@ -225,7 +225,7 @@
 		return nil, errors.New(fmt.Sprintf("unsupported type in paginate, got %T", seq))
 	}
 
-	urlFactory := newPaginationUrlFactory(section)
+	urlFactory := newPaginationURLFactory(section)
 	paginator, _ := newPaginator(pages, paginateSize, urlFactory)
 	pagers := paginator.Pagers()
 
@@ -232,7 +232,7 @@
 	return pagers, nil
 }
 
-func newPaginator(pages Pages, size int, urlFactory paginationUrlFactory) (*paginator, error) {
+func newPaginator(pages Pages, size int, urlFactory paginationURLFactory) (*paginator, error) {
 
 	if size <= 0 {
 		return nil, errors.New("Paginator size must be positive")
@@ -240,7 +240,7 @@
 
 	split := splitPages(pages, size)
 
-	p := &paginator{total: len(pages), paginatedPages: split, size: size, paginationUrlFactory: urlFactory}
+	p := &paginator{total: len(pages), paginatedPages: split, size: size, paginationURLFactory: urlFactory}
 
 	var ps pagers
 
@@ -259,7 +259,7 @@
 	return p, nil
 }
 
-func newPaginationUrlFactory(pathElements ...string) paginationUrlFactory {
+func newPaginationURLFactory(pathElements ...string) paginationURLFactory {
 	paginatePath := viper.GetString("paginatePath")
 
 	return func(page int) string {
@@ -270,6 +270,6 @@
 			rel = fmt.Sprintf("/%s/%s/%d/", path.Join(pathElements...), paginatePath, page)
 		}
 
-		return helpers.UrlizeAndPrep(rel)
+		return helpers.URLizeAndPrep(rel)
 	}
 }
--- a/hugolib/pagination_test.go
+++ b/hugolib/pagination_test.go
@@ -99,8 +99,8 @@
 
 func TestPaginationUrlFactory(t *testing.T) {
 	viper.Set("PaginatePath", "zoo")
-	unicode := newPaginationUrlFactory("новости проекта")
-	fooBar := newPaginationUrlFactory("foo", "bar")
+	unicode := newPaginationURLFactory("новости проекта")
+	fooBar := newPaginationURLFactory("foo", "bar")
 
 	assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/", unicode(1))
 	assert.Equal(t, "/foo/bar/", fooBar(1))
--- a/hugolib/path_separators_windows_test.go
+++ b/hugolib/path_separators_windows_test.go
@@ -11,7 +11,7 @@
 )
 
 func TestTemplatePathSeparator(t *testing.T) {
-	tmpl := new(tpl.GoHtmlTemplate)
+	tmpl := new(tpl.GoHTMLTemplate)
 	if name := tmpl.GenerateTemplateNameFrom(win_base, win_path); name != "sub1/index.html" {
 		t.Fatalf("Template name incorrect.  Expected: %s, Got: %s", "sub1/index.html", name)
 	}
--- a/hugolib/permalinks.go
+++ b/hugolib/permalinks.go
@@ -139,7 +139,7 @@
 func pageToPermalinkTitle(p *Page, _ string) (string, error) {
 	// Page contains Node which has Title
 	// (also contains UrlPath which has Slug, sometimes)
-	return helpers.Urlize(p.Title), nil
+	return helpers.URLize(p.Title), nil
 }
 
 // pageToPermalinkFilename returns the URL-safe form of the filename
@@ -146,7 +146,7 @@
 func pageToPermalinkFilename(p *Page, _ string) (string, error) {
 	//var extension = p.Source.Ext
 	//var name = p.Source.Path()[0 : len(p.Source.Path())-len(extension)]
-	return helpers.Urlize(p.Source.BaseFileName()), nil
+	return helpers.URLize(p.Source.BaseFileName()), nil
 }
 
 // if the page has a slug, return the slug, else return the title
--- a/hugolib/shortcode.go
+++ b/hugolib/shortcode.go
@@ -203,7 +203,7 @@
 		if sc.doMarkup {
 			newInner := helpers.RenderBytes(&helpers.RenderingContext{
 				Content: []byte(inner), PageFmt: p.guessMarkupType(),
-				DocumentId: p.UniqueId(), Config: p.getRenderingConfig()})
+				DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
 
 			// If the type is “unknown” or “markdown”, we assume the markdown
 			// generation has been performed. Given the input: `a line`, markdown
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -113,7 +113,7 @@
 	Permalinks          PermalinkOverrides
 	Params              map[string]interface{}
 	BuildDrafts         bool
-	canonifyUrls        bool
+	canonifyURLs        bool
 	paginationPageCount uint64
 	Data                *map[string]interface{}
 }
@@ -158,10 +158,10 @@
 }
 
 func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error) {
-	var refUrl *url.URL
+	var refURL *url.URL
 	var err error
 
-	refUrl, err = url.Parse(ref)
+	refURL, err = url.Parse(ref)
 
 	if err != nil {
 		return "", err
@@ -170,9 +170,9 @@
 	var target *Page
 	var link string
 
-	if refUrl.Path != "" {
+	if refURL.Path != "" {
 		for _, page := range []*Page(*s.Pages) {
-			if page.Source.Path() == refUrl.Path || page.Source.LogicalName() == refUrl.Path {
+			if page.Source.Path() == refURL.Path || page.Source.LogicalName() == refURL.Path {
 				target = page
 				break
 			}
@@ -179,7 +179,7 @@
 		}
 
 		if target == nil {
-			return "", fmt.Errorf("No page found with path or logical name \"%s\".\n", refUrl.Path)
+			return "", fmt.Errorf("No page found with path or logical name \"%s\".\n", refURL.Path)
 		}
 
 		if relative {
@@ -193,13 +193,13 @@
 		}
 	}
 
-	if refUrl.Fragment != "" {
-		link = link + "#" + refUrl.Fragment
+	if refURL.Fragment != "" {
+		link = link + "#" + refURL.Fragment
 
-		if refUrl.Path != "" && target != nil && !target.getRenderingConfig().PlainIdAnchors {
-			link = link + ":" + target.UniqueId()
-		} else if page != nil && !page.getRenderingConfig().PlainIdAnchors {
-			link = link + ":" + page.UniqueId()
+		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + target.UniqueID()
+		} else if page != nil && !page.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + page.UniqueID()
 		}
 	}
 
@@ -316,11 +316,11 @@
 func readData(f *source.File) (interface{}, error) {
 	switch f.Extension() {
 	case "yaml", "yml":
-		return parser.HandleYamlMetaData(f.Bytes())
+		return parser.HandleYAMLMetaData(f.Bytes())
 	case "json":
-		return parser.HandleJsonMetaData(f.Bytes())
+		return parser.HandleJSONMetaData(f.Bytes())
 	case "toml":
-		return parser.HandleTomlMetaData(f.Bytes())
+		return parser.HandleTOMLMetaData(f.Bytes())
 	default:
 		return nil, fmt.Errorf("Data not supported for extension '%s'", f.Extension())
 	}
@@ -444,7 +444,7 @@
 	}
 
 	s.Info = SiteInfo{
-		BaseUrl:         template.URL(helpers.SanitizeUrlKeepTrailingSlash(viper.GetString("BaseUrl"))),
+		BaseUrl:         template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
 		Title:           viper.GetString("Title"),
 		Author:          viper.GetStringMap("author"),
 		LanguageCode:    viper.GetString("languagecode"),
@@ -451,7 +451,7 @@
 		Copyright:       viper.GetString("copyright"),
 		DisqusShortname: viper.GetString("DisqusShortname"),
 		BuildDrafts:     viper.GetBool("BuildDrafts"),
-		canonifyUrls:    viper.GetBool("CanonifyUrls"),
+		canonifyURLs:    viper.GetBool("CanonifyURLs"),
 		Pages:           &s.Pages,
 		Recent:          &s.Pages,
 		Menus:           &s.Menus,
@@ -706,12 +706,12 @@
 
 					if strings.HasPrefix(menuEntry.Url, "/") {
 						// make it match the nodes
-						menuEntryUrl := menuEntry.Url
-						menuEntryUrl = helpers.UrlizeAndPrep(menuEntryUrl)
-						if !s.Info.canonifyUrls {
-							menuEntryUrl = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryUrl)
+						menuEntryURL := menuEntry.Url
+						menuEntryURL = helpers.URLizeAndPrep(menuEntryURL)
+						if !s.Info.canonifyURLs {
+							menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL)
 						}
-						menuEntry.Url = menuEntryUrl
+						menuEntry.Url = menuEntryURL
 					}
 
 					if ret[name] == nil {
@@ -1249,7 +1249,7 @@
 		}
 	}
 
-	n.Url = helpers.Urlize("404.html")
+	n.Url = helpers.URLize("404.html")
 	n.Title = "404 Page not found"
 	n.Permalink = s.permalink("404.html")
 
@@ -1315,7 +1315,7 @@
 }
 
 func (s *Site) setUrls(n *Node, in string) {
-	n.Url = helpers.UrlizeAndPrep(in)
+	n.Url = helpers.URLizeAndPrep(in)
 	n.Permalink = s.permalink(n.Url)
 	n.RSSLink = s.permalink(in + ".xml")
 }
@@ -1325,7 +1325,7 @@
 }
 
 func (s *Site) permalinkStr(plink string) string {
-	return helpers.MakePermalink(string(viper.GetString("BaseUrl")), helpers.UrlizeAndPrep(plink)).String()
+	return helpers.MakePermalink(string(viper.GetString("BaseURL")), helpers.URLizeAndPrep(plink)).String()
 }
 
 func (s *Site) NewNode() *Node {
@@ -1348,7 +1348,7 @@
 
 	err := s.render(name, d, renderBuffer, layouts...)
 
-	absURLInXML, err := transform.AbsURLInXML(viper.GetString("BaseUrl"))
+	absURLInXML, err := transform.AbsURLInXML(viper.GetString("BaseURL"))
 	if err != nil {
 		return err
 	}
@@ -1377,8 +1377,8 @@
 
 	transformLinks := transform.NewEmptyTransforms()
 
-	if viper.GetBool("CanonifyUrls") {
-		absURL, err := transform.AbsURL(viper.GetString("BaseUrl"))
+	if viper.GetBool("CanonifyURLs") {
+		absURL, err := transform.AbsURL(viper.GetString("BaseURL"))
 		if err != nil {
 			return err
 		}
@@ -1460,7 +1460,7 @@
 		if s.Targets.Page == nil {
 			s.Targets.Page = &target.PagePub{
 				PublishDir: s.absPublishDir(),
-				UglyUrls:   viper.GetBool("UglyUrls"),
+				UglyURLs:   viper.GetBool("UglyURLs"),
 			}
 		}
 		if s.Targets.File == nil {
--- a/hugolib/site_show_plan_test.go
+++ b/hugolib/site_show_plan_test.go
@@ -92,7 +92,7 @@
 
 func TestPageTargetUgly(t *testing.T) {
 	s := &Site{
-		Targets: targetList{Page: &target.PagePub{UglyUrls: true}},
+		Targets: targetList{Page: &target.PagePub{UglyURLs: true}},
 		Source:  &source.InMemorySource{ByteSource: fakeSource},
 	}
 	s.AliasTarget()
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -309,18 +309,18 @@
 
 // Issue #939
 func Test404ShouldAlwaysHaveUglyUrls(t *testing.T) {
-	for _, uglyUrls := range []bool{true, false} {
-		doTest404ShouldAlwaysHaveUglyUrls(t, uglyUrls)
+	for _, uglyURLs := range []bool{true, false} {
+		doTest404ShouldAlwaysHaveUglyUrls(t, uglyURLs)
 	}
 }
 
-func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyUrls bool) {
+func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyURLs bool) {
 	viper.Set("verbose", true)
 	viper.Set("baseurl", "http://auth/bub")
 	viper.Set("DisableSitemap", false)
 	viper.Set("DisableRSS", false)
 
-	viper.Set("UglyUrls", uglyUrls)
+	viper.Set("UglyURLs", uglyURLs)
 
 	sources := []source.ByteSource{
 		{filepath.FromSlash("sect/doc1.html"), []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
@@ -328,7 +328,7 @@
 
 	s := &Site{
 		Source:  &source.InMemorySource{ByteSource: sources},
-		Targets: targetList{Page: &target.PagePub{UglyUrls: uglyUrls}},
+		Targets: targetList{Page: &target.PagePub{UglyURLs: uglyURLs}},
 	}
 
 	s.initializeSiteInfo()
@@ -347,7 +347,7 @@
 	s.RenderSitemap()
 
 	var expectedPagePath string
-	if uglyUrls {
+	if uglyURLs {
 		expectedPagePath = "sect/doc1.html"
 	} else {
 		expectedPagePath = "sect/doc1/index.html"
@@ -392,11 +392,11 @@
 	}
 
 	viper.Set("verbose", true)
-	viper.Set("CanonifyUrls", true)
+	viper.Set("CanonifyURLs", true)
 	viper.Set("baseurl", "http://auth/bub")
 	s := &Site{
 		Source:  &source.InMemorySource{ByteSource: sources},
-		Targets: targetList{Page: &target.PagePub{UglyUrls: true}},
+		Targets: targetList{Page: &target.PagePub{UglyURLs: true}},
 	}
 
 	s.initializeSiteInfo()
@@ -442,13 +442,13 @@
 		{filepath.FromSlash("content/blue/doc2.html"), []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")},
 	}
 	for _, canonify := range []bool{true, false} {
-		viper.Set("CanonifyUrls", canonify)
-		viper.Set("BaseUrl", "http://auth/bub")
+		viper.Set("CanonifyURLs", canonify)
+		viper.Set("BaseURL", "http://auth/bub")
 		s := &Site{
 			Source:  &source.InMemorySource{ByteSource: sources},
-			Targets: targetList{Page: &target.PagePub{UglyUrls: true}},
+			Targets: targetList{Page: &target.PagePub{UglyURLs: true}},
 		}
-		t.Logf("Rendering with BaseUrl %q and CanonifyUrls set %v", viper.GetString("baseUrl"), canonify)
+		t.Logf("Rendering with BaseURL %q and CanonifyURLs set %v", viper.GetString("baseURL"), canonify)
 		s.initializeSiteInfo()
 		templatePrep(s)
 		must(s.addTemplate("blue/single.html", TEMPLATE_WITH_URL_ABS))
@@ -823,13 +823,13 @@
 	}
 }
 
-func TestDataDirJson(t *testing.T) {
+func TestDataDirJSON(t *testing.T) {
 	sources := []source.ByteSource{
 		{filepath.FromSlash("test/foo.json"), []byte(`{ "bar": "foofoo"  }`)},
 		{filepath.FromSlash("test.json"), []byte(`{ "hello": [ { "world": "foo" } ] }`)},
 	}
 
-	expected, err := parser.HandleJsonMetaData([]byte(`{ "test": { "hello": [{ "world": "foo"  }] , "foo": { "bar":"foofoo" } } }`))
+	expected, err := parser.HandleJSONMetaData([]byte(`{ "test": { "hello": [{ "world": "foo"  }] , "foo": { "bar":"foofoo" } } }`))
 
 	if err != nil {
 		t.Fatalf("Error %s", err)
@@ -843,7 +843,7 @@
 		{filepath.FromSlash("test/kung.toml"), []byte("[foo]\nbar = 1")},
 	}
 
-	expected, err := parser.HandleTomlMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))
+	expected, err := parser.HandleTOMLMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))
 
 	if err != nil {
 		t.Fatalf("Error %s", err)
@@ -852,7 +852,7 @@
 	doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: sources}})
 }
 
-func TestDataDirYamlWithOverridenValue(t *testing.T) {
+func TestDataDirYAMLWithOverridenValue(t *testing.T) {
 	sources := []source.ByteSource{
 		// filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this:
 		{filepath.FromSlash("a.yaml"), []byte("a: 1")},
@@ -878,7 +878,7 @@
 		{filepath.FromSlash("test/second.toml"), []byte("tender = 2")},
 	}
 
-	expected, _ := parser.HandleTomlMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))
+	expected, _ := parser.HandleTOMLMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))
 
 	doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: s1}, &source.InMemorySource{ByteSource: s2}})
 
--- a/hugolib/taxonomy.go
+++ b/hugolib/taxonomy.go
@@ -162,7 +162,7 @@
 
 func (wp WeightedPages) Prev(cur *Page) *Page {
 	for x, c := range wp {
-		if c.Page.UniqueId() == cur.UniqueId() {
+		if c.Page.UniqueID() == cur.UniqueID() {
 			if x == 0 {
 				return wp[len(wp)-1].Page
 			}
@@ -174,7 +174,7 @@
 
 func (wp WeightedPages) Next(cur *Page) *Page {
 	for x, c := range wp {
-		if c.Page.UniqueId() == cur.UniqueId() {
+		if c.Page.UniqueID() == cur.UniqueID() {
 			if x < len(wp)-1 {
 				return wp[x+1].Page
 			}
--- a/parser/frontmatter.go
+++ b/parser/frontmatter.go
@@ -151,19 +151,19 @@
 func DetectFrontMatter(mark rune) (f *FrontmatterType) {
 	switch mark {
 	case '-':
-		return &FrontmatterType{[]byte(YAML_DELIM), []byte(YAML_DELIM), HandleYamlMetaData, false}
+		return &FrontmatterType{[]byte(YAML_DELIM), []byte(YAML_DELIM), HandleYAMLMetaData, false}
 	case '+':
-		return &FrontmatterType{[]byte(TOML_DELIM), []byte(TOML_DELIM), HandleTomlMetaData, false}
+		return &FrontmatterType{[]byte(TOML_DELIM), []byte(TOML_DELIM), HandleTOMLMetaData, false}
 	case '{':
-		return &FrontmatterType{[]byte{'{'}, []byte{'}'}, HandleJsonMetaData, true}
+		return &FrontmatterType{[]byte{'{'}, []byte{'}'}, HandleJSONMetaData, true}
 	default:
 		return nil
 	}
 }
 
-func HandleTomlMetaData(datum []byte) (interface{}, error) {
+func HandleTOMLMetaData(datum []byte) (interface{}, error) {
 	m := map[string]interface{}{}
-	datum = removeTomlIdentifier(datum)
+	datum = removeTOMLIdentifier(datum)
 	if _, err := toml.Decode(string(datum), &m); err != nil {
 		return m, err
 	}
@@ -170,11 +170,11 @@
 	return m, nil
 }
 
-func removeTomlIdentifier(datum []byte) []byte {
+func removeTOMLIdentifier(datum []byte) []byte {
 	return bytes.Replace(datum, []byte(TOML_DELIM), []byte(""), -1)
 }
 
-func HandleYamlMetaData(datum []byte) (interface{}, error) {
+func HandleYAMLMetaData(datum []byte) (interface{}, error) {
 	m := map[string]interface{}{}
 	if err := yaml.Unmarshal(datum, &m); err != nil {
 		return m, err
@@ -182,7 +182,7 @@
 	return m, nil
 }
 
-func HandleJsonMetaData(datum []byte) (interface{}, error) {
+func HandleJSONMetaData(datum []byte) (interface{}, error) {
 	var f interface{}
 	if err := json.Unmarshal(datum, &f); err != nil {
 		return f, err
--- a/source/file.go
+++ b/source/file.go
@@ -27,14 +27,14 @@
 	section     string // The first directory
 	dir         string // The full directory Path (minus file name)
 	ext         string // Just the ext (eg txt)
-	uniqueId    string // MD5 of the filename
+	uniqueID    string // MD5 of the filename
 }
 
-func (f *File) UniqueId() string {
-	if f.uniqueId == "" {
-		f.uniqueId = helpers.Md5String(f.LogicalName())
+func (f *File) UniqueID() string {
+	if f.uniqueID == "" {
+		f.uniqueID = helpers.Md5String(f.LogicalName())
 	}
-	return f.uniqueId
+	return f.uniqueID
 }
 
 func (f *File) String() string {
--- a/target/page.go
+++ b/target/page.go
@@ -16,7 +16,7 @@
 }
 
 type PagePub struct {
-	UglyUrls         bool
+	UglyURLs         bool
 	DefaultExtension string
 	PublishDir       string
 }
@@ -46,7 +46,7 @@
 		dir = filepath.Join(pp.PublishDir, dir)
 	}
 
-	if pp.UglyUrls || file == "index.html" || file == "404.html" {
+	if pp.UglyURLs || file == "index.html" || file == "404.html" {
 		return filepath.Join(dir, fmt.Sprintf("%s%s", name, ext)), nil
 	}
 
--- a/target/page_test.go
+++ b/target/page_test.go
@@ -74,7 +74,7 @@
 	}
 
 	for _, test := range tests {
-		f := &PagePub{UglyUrls: true}
+		f := &PagePub{UglyURLs: true}
 		dest, err := f.Translate(filepath.FromSlash(test.content))
 		if err != nil {
 			t.Fatalf("Translate returned an unexpected err: %s", err)
--- a/tpl/template.go
+++ b/tpl/template.go
@@ -58,7 +58,7 @@
 	err  error
 }
 
-type GoHtmlTemplate struct {
+type GoHTMLTemplate struct {
 	template.Template
 	errors []*templateErr
 }
@@ -81,7 +81,7 @@
 // Return a new Hugo Template System
 // With all the additional features, templates & functions
 func New() Template {
-	var templates = &GoHtmlTemplate{
+	var templates = &GoHTMLTemplate{
 		Template: *template.New(""),
 		errors:   make([]*templateErr, 0),
 	}
@@ -934,21 +934,21 @@
 	return t.Format(layout), nil
 }
 
-func SafeHtml(text string) template.HTML {
+func SafeHTML(text string) template.HTML {
 	return template.HTML(text)
 }
 
 // "safeHtmlAttr" is currently disabled, pending further discussion
 // on its use case.  2015-01-19
-func SafeHtmlAttr(text string) template.HTMLAttr {
+func SafeHTMLAttr(text string) template.HTMLAttr {
 	return template.HTMLAttr(text)
 }
 
-func SafeCss(text string) template.CSS {
+func SafeCSS(text string) template.CSS {
 	return template.CSS(text)
 }
 
-func SafeUrl(text string) template.URL {
+func SafeURL(text string) template.URL {
 	return template.URL(text)
 }
 
@@ -1151,12 +1151,12 @@
 	return template.HTML(b.String())
 }
 
-func (t *GoHtmlTemplate) LoadEmbedded() {
+func (t *GoHTMLTemplate) LoadEmbedded() {
 	t.EmbedShortcodes()
 	t.EmbedTemplates()
 }
 
-func (t *GoHtmlTemplate) AddInternalTemplate(prefix, name, tpl string) error {
+func (t *GoHTMLTemplate) AddInternalTemplate(prefix, name, tpl string) error {
 	if prefix != "" {
 		return t.AddTemplate("_internal/"+prefix+"/"+name, tpl)
 	} else {
@@ -1164,11 +1164,11 @@
 	}
 }
 
-func (t *GoHtmlTemplate) AddInternalShortcode(name, content string) error {
+func (t *GoHTMLTemplate) AddInternalShortcode(name, content string) error {
 	return t.AddInternalTemplate("shortcodes", name, content)
 }
 
-func (t *GoHtmlTemplate) AddTemplate(name, tpl string) error {
+func (t *GoHTMLTemplate) AddTemplate(name, tpl string) error {
 	_, err := t.New(name).Parse(tpl)
 	if err != nil {
 		t.errors = append(t.errors, &templateErr{name: name, err: err})
@@ -1176,7 +1176,7 @@
 	return err
 }
 
-func (t *GoHtmlTemplate) AddTemplateFile(name, path string) error {
+func (t *GoHTMLTemplate) AddTemplateFile(name, path string) error {
 	// get the suffix and switch on that
 	ext := filepath.Ext(path)
 	switch ext {
@@ -1221,7 +1221,7 @@
 
 }
 
-func (t *GoHtmlTemplate) GenerateTemplateNameFrom(base, path string) string {
+func (t *GoHTMLTemplate) GenerateTemplateNameFrom(base, path string) string {
 	name, _ := filepath.Rel(base, path)
 	return filepath.ToSlash(name)
 }
@@ -1234,7 +1234,7 @@
 	return path[len(path)-1] == '~'
 }
 
-func (t *GoHtmlTemplate) loadTemplates(absPath string, prefix string) {
+func (t *GoHTMLTemplate) loadTemplates(absPath string, prefix string) {
 	walker := func(path string, fi os.FileInfo, err error) error {
 		if err != nil {
 			return nil
@@ -1277,15 +1277,15 @@
 	filepath.Walk(absPath, walker)
 }
 
-func (t *GoHtmlTemplate) LoadTemplatesWithPrefix(absPath string, prefix string) {
+func (t *GoHTMLTemplate) LoadTemplatesWithPrefix(absPath string, prefix string) {
 	t.loadTemplates(absPath, prefix)
 }
 
-func (t *GoHtmlTemplate) LoadTemplates(absPath string) {
+func (t *GoHTMLTemplate) LoadTemplates(absPath string) {
 	t.loadTemplates(absPath, "")
 }
 
-func (t *GoHtmlTemplate) PrintErrors() {
+func (t *GoHTMLTemplate) PrintErrors() {
 	for _, e := range t.errors {
 		jww.ERROR.Println(e.err)
 	}
@@ -1293,8 +1293,9 @@
 
 func init() {
 	funcMap = template.FuncMap{
-		"urlize":      helpers.Urlize,
-		"sanitizeurl": helpers.SanitizeUrl,
+		"urlize":      helpers.URLize,
+		"sanitizeURL": helpers.SanitizeURL,
+		"sanitizeurl": helpers.SanitizeURL,
 		"eq":          Eq,
 		"ne":          Ne,
 		"gt":          Gt,
@@ -1303,11 +1304,15 @@
 		"le":          Le,
 		"in":          In,
 		"intersect":   Intersect,
+		"isSet":       IsSet,
 		"isset":       IsSet,
 		"echoParam":   ReturnWhenSet,
-		"safeHtml":    SafeHtml,
-		"safeCss":     SafeCss,
-		"safeUrl":     SafeUrl,
+		"safeHTML":    SafeHTML,
+		"safeHtml":    SafeHTML,
+		"safeCSS":     SafeCSS,
+		"safeCss":     SafeCSS,
+		"safeURL":     SafeURL,
+		"safeUrl":     SafeURL,
 		"markdownify": Markdownify,
 		"first":       First,
 		"where":       Where,
@@ -1331,8 +1336,10 @@
 		"replace":     Replace,
 		"trim":        Trim,
 		"dateFormat":  DateFormat,
-		"getJson":     GetJson,
-		"getCsv":      GetCsv,
+		"getJSON":     GetJSON,
+		"getJson":     GetJSON,
+		"getCSV":      GetCSV,
+		"getCsv":      GetCSV,
 	}
 
 }
--- a/tpl/template_embedded.go
+++ b/tpl/template_embedded.go
@@ -18,7 +18,7 @@
 	Data string
 }
 
-func (t *GoHtmlTemplate) EmbedShortcodes() {
+func (t *GoHTMLTemplate) EmbedShortcodes() {
 	t.AddInternalShortcode("ref.html", `{{ .Get 0 | ref .Page }}`)
 	t.AddInternalShortcode("relref.html", `{{ .Get 0 | relref .Page }}`)
 	t.AddInternalShortcode("highlight.html", `{{ .Get 0 | highlight .Inner  }}`)
@@ -43,7 +43,7 @@
 <!-- image -->`)
 }
 
-func (t *GoHtmlTemplate) EmbedTemplates() {
+func (t *GoHTMLTemplate) EmbedTemplates() {
 
 	t.AddInternalTemplate("_default", "rss.xml", `<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
   <channel>
--- a/tpl/template_resources.go
+++ b/tpl/template_resources.go
@@ -31,7 +31,7 @@
 	"github.com/spf13/viper"
 )
 
-var remoteUrlLock = &remoteLock{m: make(map[string]*sync.Mutex)}
+var remoteURLLock = &remoteLock{m: make(map[string]*sync.Mutex)}
 
 type remoteLock struct {
 	sync.RWMutex
@@ -39,7 +39,7 @@
 }
 
 // resLock locks an URL during download
-func (l *remoteLock) UrlLock(url string) {
+func (l *remoteLock) URLLock(url string) {
 	l.Lock()
 	if _, ok := l.m[url]; !ok {
 		l.m[url] = &sync.Mutex{}
@@ -49,7 +49,7 @@
 }
 
 // resUnlock unlocks an URL when the download has been finished. Use only in defer calls.
-func (l *remoteLock) UrlUnlock(url string) {
+func (l *remoteLock) URLUnlock(url string) {
 	l.RLock()
 	defer l.RUnlock()
 	if um, ok := l.m[url]; ok {
@@ -111,8 +111,8 @@
 	}
 
 	// avoid race condition with locks, block other goroutines if the current url is processing
-	remoteUrlLock.UrlLock(url)
-	defer func() { remoteUrlLock.UrlUnlock(url) }()
+	remoteURLLock.URLLock(url)
+	defer func() { remoteURLLock.URLUnlock(url) }()
 
 	// avoid multiple locks due to calling resGetCache twice
 	c, err = resGetCache(url, fs, viper.GetBool("IgnoreCache"))
@@ -176,7 +176,7 @@
 // GetJson expects one or n-parts of a URL to a resource which can either be a local or a remote one.
 // If you provide multiple parts they will be joined together to the final URL.
 // GetJson returns nil or parsed JSON to use in a short code.
-func GetJson(urlParts ...string) interface{} {
+func GetJSON(urlParts ...string) interface{} {
 	url := strings.Join(urlParts, "")
 	c, err := resGetResource(url)
 	if err != nil {
@@ -194,7 +194,7 @@
 }
 
 // parseCsv parses bytes of csv data into a slice slice string or an error
-func parseCsv(c []byte, sep string) ([][]string, error) {
+func parseCSV(c []byte, sep string) ([][]string, error) {
 	if len(sep) != 1 {
 		return nil, errors.New("Incorrect length of csv separator: " + sep)
 	}
@@ -211,7 +211,7 @@
 // The data separator can be a comma, semi-colon, pipe, etc, but only one character.
 // If you provide multiple parts for the URL they will be joined together to the final URL.
 // GetCsv returns nil or a slice slice to use in a short code.
-func GetCsv(sep string, urlParts ...string) [][]string {
+func GetCSV(sep string, urlParts ...string) [][]string {
 	url := strings.Join(urlParts, "")
 	c, err := resGetResource(url)
 	if err != nil {
@@ -218,7 +218,7 @@
 		jww.ERROR.Printf("Failed to get csv resource %s with error message %s", url, err)
 		return nil
 	}
-	d, err := parseCsv(c, sep)
+	d, err := parseCSV(c, sep)
 	if err != nil {
 		jww.ERROR.Printf("Failed to read csv resource %s with error message %s", url, err)
 		return nil
--- a/tpl/template_resources_test.go
+++ b/tpl/template_resources_test.go
@@ -157,7 +157,7 @@
 	}
 }
 
-func TestParseCsv(t *testing.T) {
+func TestParseCSV(t *testing.T) {
 
 	tests := []struct {
 		csv []byte
@@ -173,7 +173,7 @@
 		{[]byte("z|y|c\nd|e|f"), "|", "zycdef", false},
 	}
 	for _, test := range tests {
-		csv, err := parseCsv(test.csv, test.sep)
+		csv, err := parseCSV(test.csv, test.sep)
 		if test.err && err == nil {
 			t.Error("Expecting an error")
 		}
--- a/tpl/template_test.go
+++ b/tpl/template_test.go
@@ -972,7 +972,7 @@
 	}
 }
 
-func TestSafeHtml(t *testing.T) {
+func TestSafeHTML(t *testing.T) {
 	for i, this := range []struct {
 		str                 string
 		tmplStr             string
@@ -997,7 +997,7 @@
 		}
 
 		buf.Reset()
-		err = tmpl.Execute(buf, SafeHtml(this.str))
+		err = tmpl.Execute(buf, SafeHTML(this.str))
 		if err != nil {
 			t.Errorf("[%d] execute template with an escaped string value by SafeHtml returns unexpected error: %s", i, err)
 		}
@@ -1007,7 +1007,7 @@
 	}
 }
 
-func TestSafeHtmlAttr(t *testing.T) {
+func TestSafeHTMLAttr(t *testing.T) {
 	for i, this := range []struct {
 		str                 string
 		tmplStr             string
@@ -1032,7 +1032,7 @@
 		}
 
 		buf.Reset()
-		err = tmpl.Execute(buf, SafeHtmlAttr(this.str))
+		err = tmpl.Execute(buf, SafeHTMLAttr(this.str))
 		if err != nil {
 			t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr returns unexpected error: %s", i, err)
 		}
@@ -1042,7 +1042,7 @@
 	}
 }
 
-func TestSafeCss(t *testing.T) {
+func TestSafeCSS(t *testing.T) {
 	for i, this := range []struct {
 		str                 string
 		tmplStr             string
@@ -1067,7 +1067,7 @@
 		}
 
 		buf.Reset()
-		err = tmpl.Execute(buf, SafeCss(this.str))
+		err = tmpl.Execute(buf, SafeCSS(this.str))
 		if err != nil {
 			t.Errorf("[%d] execute template with an escaped string value by SafeCss returns unexpected error: %s", i, err)
 		}
@@ -1077,7 +1077,7 @@
 	}
 }
 
-func TestSafeUrl(t *testing.T) {
+func TestSafeURL(t *testing.T) {
 	for i, this := range []struct {
 		str                 string
 		tmplStr             string
@@ -1102,7 +1102,7 @@
 		}
 
 		buf.Reset()
-		err = tmpl.Execute(buf, SafeUrl(this.str))
+		err = tmpl.Execute(buf, SafeURL(this.str))
 		if err != nil {
 			t.Errorf("[%d] execute template with an escaped string value by SafeUrl returns unexpected error: %s", i, err)
 		}
--- a/transform/absurl.go
+++ b/transform/absurl.go
@@ -4,12 +4,12 @@
 	"sync"
 )
 
-var absUrlInit sync.Once
-var ar *absurlReplacer
+var absURLInit sync.Once
+var ar *absURLReplacer
 
 // for performance reasons, we reuse the first baseUrl given
 func initAbsurlReplacer(baseURL string) {
-	absUrlInit.Do(func() {
+	absURLInit.Do(func() {
 		ar = newAbsurlReplacer(baseURL)
 	})
 }
@@ -18,7 +18,7 @@
 	initAbsurlReplacer(absURL)
 
 	trs = append(trs, func(content []byte) []byte {
-		return ar.replaceInHtml(content)
+		return ar.replaceInHTML(content)
 	})
 	return
 }
@@ -27,7 +27,7 @@
 	initAbsurlReplacer(absURL)
 
 	trs = append(trs, func(content []byte) []byte {
-		return ar.replaceInXml(content)
+		return ar.replaceInXML(content)
 	})
 	return
 }
--- a/transform/absurlreplacer.go
+++ b/transform/absurlreplacer.go
@@ -29,7 +29,7 @@
 	start int // item start position
 	width int // width of last element
 
-	matchers     []absurlMatcher
+	matchers     []absURLMatcher
 	state        stateFunc
 	prefixLookup *prefixes
 
@@ -101,13 +101,13 @@
 
 var mainPrefixRunes = []prefixRunes{{'s', 'r', 'c', '='}, {'h', 'r', 'e', 'f', '='}}
 
-type absurlMatcher struct {
+type absURLMatcher struct {
 	prefix      int
 	match       []byte
 	replacement []byte
 }
 
-func (a absurlMatcher) isSourceType() bool {
+func (a absURLMatcher) isSourceType() bool {
 	return a.prefix == matchPrefixSrc
 }
 
@@ -177,7 +177,7 @@
 	}
 }
 
-func doReplace(content []byte, matchers []absurlMatcher) []byte {
+func doReplace(content []byte, matchers []absURLMatcher) []byte {
 	b := bp.GetBuffer()
 	defer bp.PutBuffer(b)
 
@@ -191,48 +191,48 @@
 	return b.Bytes()
 }
 
-type absurlReplacer struct {
-	htmlMatchers []absurlMatcher
-	xmlMatchers  []absurlMatcher
+type absURLReplacer struct {
+	htmlMatchers []absURLMatcher
+	xmlMatchers  []absURLMatcher
 }
 
-func newAbsurlReplacer(baseUrl string) *absurlReplacer {
-	u, _ := url.Parse(baseUrl)
+func newAbsurlReplacer(baseURL string) *absURLReplacer {
+	u, _ := url.Parse(baseURL)
 	base := strings.TrimRight(u.String(), "/")
 
 	// HTML
-	dqHtmlMatch := []byte("\"/")
-	sqHtmlMatch := []byte("'/")
+	dqHTMLMatch := []byte("\"/")
+	sqHTMLMatch := []byte("'/")
 
 	// XML
-	dqXmlMatch := []byte("&#34;/")
-	sqXmlMatch := []byte("&#39;/")
+	dqXMLMatch := []byte("&#34;/")
+	sqXMLMatch := []byte("&#39;/")
 
-	dqHtml := []byte("\"" + base + "/")
-	sqHtml := []byte("'" + base + "/")
+	dqHTML := []byte("\"" + base + "/")
+	sqHTML := []byte("'" + base + "/")
 
-	dqXml := []byte("&#34;" + base + "/")
-	sqXml := []byte("&#39;" + base + "/")
+	dqXML := []byte("&#34;" + base + "/")
+	sqXML := []byte("&#39;" + base + "/")
 
-	return &absurlReplacer{
-		htmlMatchers: []absurlMatcher{
-			{matchPrefixSrc, dqHtmlMatch, dqHtml},
-			{matchPrefixSrc, sqHtmlMatch, sqHtml},
-			{matchPrefixHref, dqHtmlMatch, dqHtml},
-			{matchPrefixHref, sqHtmlMatch, sqHtml}},
-		xmlMatchers: []absurlMatcher{
-			{matchPrefixSrc, dqXmlMatch, dqXml},
-			{matchPrefixSrc, sqXmlMatch, sqXml},
-			{matchPrefixHref, dqXmlMatch, dqXml},
-			{matchPrefixHref, sqXmlMatch, sqXml},
+	return &absURLReplacer{
+		htmlMatchers: []absURLMatcher{
+			{matchPrefixSrc, dqHTMLMatch, dqHTML},
+			{matchPrefixSrc, sqHTMLMatch, sqHTML},
+			{matchPrefixHref, dqHTMLMatch, dqHTML},
+			{matchPrefixHref, sqHTMLMatch, sqHTML}},
+		xmlMatchers: []absURLMatcher{
+			{matchPrefixSrc, dqXMLMatch, dqXML},
+			{matchPrefixSrc, sqXMLMatch, sqXML},
+			{matchPrefixHref, dqXMLMatch, dqXML},
+			{matchPrefixHref, sqXMLMatch, sqXML},
 		}}
 
 }
 
-func (au *absurlReplacer) replaceInHtml(content []byte) []byte {
+func (au *absURLReplacer) replaceInHTML(content []byte) []byte {
 	return doReplace(content, au.htmlMatchers)
 }
 
-func (au *absurlReplacer) replaceInXml(content []byte) []byte {
+func (au *absURLReplacer) replaceInXML(content []byte) []byte {
 	return doReplace(content, au.xmlMatchers)
 }
--- a/transform/chain_test.go
+++ b/transform/chain_test.go
@@ -54,7 +54,7 @@
 	}
 }
 
-func BenchmarkAbsUrl(b *testing.B) {
+func BenchmarkAbsURL(b *testing.B) {
 	absURL, _ := AbsURL("http://base")
 	tr := NewChain(absURL...)
 
@@ -64,7 +64,7 @@
 	}
 }
 
-func TestAbsUrl(t *testing.T) {
+func TestAbsURL(t *testing.T) {
 	absURL, _ := AbsURL("http://base")
 	tr := NewChain(absURL...)
 
@@ -72,7 +72,7 @@
 
 }
 
-func BenchmarkXmlAbsUrl(b *testing.B) {
+func BenchmarkXMLAbsURL(b *testing.B) {
 	absURLInXML, _ := AbsURLInXML("http://base")
 	tr := NewChain(absURLInXML...)
 
@@ -82,7 +82,7 @@
 	}
 }
 
-func TestXMLAbsUrl(t *testing.T) {
+func TestXMLAbsURL(t *testing.T) {
 	absURLInXML, _ := AbsURLInXML("http://base")
 	tr := NewChain(absURLInXML...)
 	apply(t.Errorf, tr, xml_abs_url_tests)