shithub: hugo

Download patch

ref: beaa8b1bcabd4be25ac26bea39ab9f7290147e67
parent: e522e5f4154cb6a5d960aeb8920fa3e433641cf6
author: bep <[email protected]>
date: Fri May 15 20:11:39 EDT 2015

Add support for URLs relative to context root

Setting `RelativeURLs` to `true` will make all relative URLs in the site *really* relative.

And will do so with speed.

So:

In `/post/myblogpost.html`:

`/mycss.css` becomes `../mycss.css`

The same URL in `/index.html` will become:

`./mycss.css` etc.

Note that absolute URLs will not be touched (whether external resources or URLs constructed with `BaseURL`).

The speediness is about the same as before:

```
benchmark                    old ns/op     new ns/op     delta
BenchmarkAbsURL              17462         18164         +4.02%
BenchmarkAbsURLSrcset        18842         19632         +4.19%
BenchmarkXMLAbsURLSrcset     18643         19313         +3.59%
BenchmarkXMLAbsURL           9283          9656          +4.02%

benchmark                    old allocs     new allocs     delta
BenchmarkAbsURL              24             28             +16.67%
BenchmarkAbsURLSrcset        29             32             +10.34%
BenchmarkXMLAbsURLSrcset     27             30             +11.11%
BenchmarkXMLAbsURL           12             14             +16.67%

benchmark                    old bytes     new bytes     delta
BenchmarkAbsURL              3154          3404          +7.93%
BenchmarkAbsURLSrcset        2376          2573          +8.29%
BenchmarkXMLAbsURLSrcset     2569          2763          +7.55%
BenchmarkXMLAbsURL           1888          1998          +5.83%

```

Fixes #1104
Fixes #622
Fixes #937
Fixes #157

--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -139,6 +139,7 @@
 	viper.SetDefault("Verbose", false)
 	viper.SetDefault("IgnoreCache", false)
 	viper.SetDefault("CanonifyURLs", false)
+	viper.SetDefault("RelativeURLs", false)
 	viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"})
 	viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0))
 	viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1})
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -232,6 +232,41 @@
 	return inPath, errors.New("Can't extract relative path, unknown prefix")
 }
 
+// Should be good enough for Hugo.
+var isFileRe = regexp.MustCompile(".*\\..{1,6}$")
+
+// Expects a relative path starting after the content directory.
+func GetDottedRelativePath(inPath string) string {
+	inPath = filepath.Clean(filepath.FromSlash(inPath))
+	if inPath == "." {
+		return "./"
+	}
+	isFile := isFileRe.MatchString(inPath)
+	if !isFile {
+		if !strings.HasSuffix(inPath, FilePathSeparator) {
+			inPath += FilePathSeparator
+		}
+	}
+	if !strings.HasPrefix(inPath, FilePathSeparator) {
+		inPath = FilePathSeparator + inPath
+	}
+	dir, _ := filepath.Split(inPath)
+
+	sectionCount := strings.Count(dir, FilePathSeparator)
+
+	if sectionCount == 0 || dir == FilePathSeparator {
+		return "./"
+	}
+
+	var dottedPath string
+
+	for i := 1; i < sectionCount; i++ {
+		dottedPath += "../"
+	}
+
+	return dottedPath
+}
+
 // Filename takes a path, strips out the extension,
 // and returns the name of the file.
 func Filename(in string) (name string) {
--- a/helpers/path_test.go
+++ b/helpers/path_test.go
@@ -112,6 +112,45 @@
 	}
 }
 
+func TestGetDottedRelativePath(t *testing.T) {
+	// on Windows this will receive both kinds, both country and western ...
+	for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
+		doTestGetDottedRelativePath(f, t)
+	}
+
+}
+
+func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
+	type test struct {
+		input, expected string
+	}
+	data := []test{
+		{"", "./"},
+		{urlFixer("/"), "./"},
+		{urlFixer("post"), "../"},
+		{urlFixer("/post"), "../"},
+		{urlFixer("post/"), "../"},
+		{urlFixer("tags/foo.html"), "../"},
+		{urlFixer("/tags/foo.html"), "../"},
+		{urlFixer("/post/"), "../"},
+		{urlFixer("////post/////"), "../"},
+		{urlFixer("/foo/bar/index.html"), "../../"},
+		{urlFixer("/foo/bar/foo/"), "../../../"},
+		{urlFixer("/foo/bar/foo"), "../../../"},
+		{urlFixer("foo/bar/foo/"), "../../../"},
+		{urlFixer("foo/bar/foo/bar"), "../../../../"},
+		{"404.html", "./"},
+		{"404.xml", "./"},
+		{"/404.html", "./"},
+	}
+	for i, d := range data {
+		output := GetDottedRelativePath(d.input)
+		if d.expected != output {
+			t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+		}
+	}
+}
+
 func TestMakeTitle(t *testing.T) {
 	type test struct {
 		input, expected string
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1397,16 +1397,21 @@
 
 	err := s.render(name, d, renderBuffer, layouts...)
 
-	absURLInXML, err := transform.AbsURLInXML()
-	if err != nil {
-		return err
-	}
-
 	outBuffer := bp.GetBuffer()
 	defer bp.PutBuffer(outBuffer)
 
-	transformer := transform.NewChain(absURLInXML...)
-	transformer.Apply(outBuffer, renderBuffer)
+	var path []byte
+	if viper.GetBool("RelativeURLs") {
+		path = []byte(helpers.GetDottedRelativePath(dest))
+	} else {
+		s := viper.GetString("BaseURL")
+		if !strings.HasSuffix(s, "/") {
+			s += "/"
+		}
+		path = []byte(s)
+	}
+	transformer := transform.NewChain(transform.AbsURLInXML)
+	transformer.Apply(outBuffer, renderBuffer, path)
 
 	if err == nil {
 		err = s.WriteDestFile(dest, outBuffer)
@@ -1426,12 +1431,8 @@
 
 	transformLinks := transform.NewEmptyTransforms()
 
-	if viper.GetBool("CanonifyURLs") {
-		absURL, err := transform.AbsURL()
-		if err != nil {
-			return err
-		}
-		transformLinks = append(transformLinks, absURL...)
+	if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") {
+		transformLinks = append(transformLinks, transform.AbsURL)
 	}
 
 	if viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
@@ -1438,8 +1439,24 @@
 		transformLinks = append(transformLinks, transform.LiveReloadInject)
 	}
 
+	var path []byte
+
+	if viper.GetBool("RelativeURLs") {
+		translated, err := s.PageTarget().(target.OptionalTranslator).TranslateRelative(dest)
+		if err != nil {
+			return err
+		}
+		path = []byte(helpers.GetDottedRelativePath(translated))
+	} else if viper.GetBool("CanonifyURLs") {
+		s := viper.GetString("BaseURL")
+		if !strings.HasSuffix(s, "/") {
+			s += "/"
+		}
+		path = []byte(s)
+	}
+
 	transformer := transform.NewChain(transformLinks...)
-	transformer.Apply(outBuffer, renderBuffer)
+	transformer.Apply(outBuffer, renderBuffer, path)
 
 	if err == nil {
 		if err = s.WriteDestPage(dest, outBuffer); err != nil {
--- a/target/file.go
+++ b/target/file.go
@@ -16,6 +16,11 @@
 	Translate(string) (string, error)
 }
 
+// TODO(bep) consider other ways to solve this.
+type OptionalTranslator interface {
+	TranslateRelative(string) (string, error)
+}
+
 type Output interface {
 	Publisher
 	Translator
--- a/target/page.go
+++ b/target/page.go
@@ -32,10 +32,18 @@
 }
 
 func (pp *PagePub) Translate(src string) (dest string, err error) {
+	dir, err := pp.TranslateRelative(src)
+	if err != nil {
+		return dir, err
+	}
+	if pp.PublishDir != "" {
+		dir = filepath.Join(pp.PublishDir, dir)
+	}
+	return dir, nil
+}
+
+func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
 	if src == helpers.FilePathSeparator {
-		if pp.PublishDir != "" {
-			return filepath.Join(pp.PublishDir, "index.html"), nil
-		}
 		return "index.html", nil
 	}
 
@@ -43,9 +51,6 @@
 	isRoot := dir == ""
 	ext := pp.extension(filepath.Ext(file))
 	name := filename(file)
-	if pp.PublishDir != "" {
-		dir = filepath.Join(pp.PublishDir, dir)
-	}
 
 	if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
 		return filepath.Join(dir, fmt.Sprintf("%s%s", name, ext)), nil
--- a/transform/absurl.go
+++ b/transform/absurl.go
@@ -1,58 +1,11 @@
 package transform
 
-import (
-	"github.com/spf13/viper"
-	"sync"
-)
+var ar *absURLReplacer = newAbsURLReplacer()
 
-// to be used in tests; the live site will get its value from Viper.
-var AbsBaseUrl string
-
-var absURLInit sync.Once
-var ar *absURLReplacer
-
-func AbsURL() (trs []link, err error) {
-	initAbsURLReplacer()
-	return absURLFromReplacer(ar)
+var AbsURL = func(ct contentTransformer) {
+	ar.replaceInHTML(ct)
 }
 
-func absURLFromURL(URL string) (trs []link, err error) {
-	return absURLFromReplacer(newAbsURLReplacer(URL))
-}
-
-func absURLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
-	trs = append(trs, func(ct contentTransformer) {
-		ar.replaceInHTML(ct)
-	})
-	return
-}
-
-func AbsURLInXML() (trs []link, err error) {
-	initAbsURLReplacer()
-	return absURLInXMLFromReplacer(ar)
-}
-
-func absURLInXMLFromURL(URL string) (trs []link, err error) {
-	return absURLInXMLFromReplacer(newAbsURLReplacer(URL))
-}
-
-func absURLInXMLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
-	trs = append(trs, func(ct contentTransformer) {
-		ar.replaceInXML(ct)
-	})
-	return
-}
-
-func initAbsURLReplacer() {
-	absURLInit.Do(func() {
-		var url string
-
-		if AbsBaseUrl != "" {
-			url = AbsBaseUrl
-		} else {
-			url = viper.GetString("BaseURL")
-		}
-
-		ar = newAbsURLReplacer(url)
-	})
+var AbsURLInXML = func(ct contentTransformer) {
+	ar.replaceInXML(ct)
 }
--- a/transform/absurlreplacer.go
+++ b/transform/absurlreplacer.go
@@ -3,8 +3,6 @@
 import (
 	"bytes"
 	"io"
-	"net/url"
-	"strings"
 	"unicode/utf8"
 )
 
@@ -23,6 +21,9 @@
 	// the target for the new absurlified content
 	w io.Writer
 
+	// path may be set to a "." relative path
+	path []byte
+
 	pos   int // input position
 	start int // item start position
 	width int // width of last element
@@ -54,9 +55,8 @@
 }
 
 type absURLMatcher struct {
-	match          []byte
-	quote          []byte
-	replacementURL []byte
+	match []byte
+	quote []byte
 }
 
 // match check rune inside word. Will be != ' '.
@@ -147,7 +147,7 @@
 		}
 		l.pos += len(m.match)
 		l.w.Write(m.quote)
-		l.w.Write(m.replacementURL)
+		l.w.Write(l.path)
 		l.start = l.pos
 	}
 }
@@ -188,7 +188,7 @@
 		l.w.Write([]byte(m.quote))
 		for i, f := range fields {
 			if f[0] == '/' {
-				l.w.Write(m.replacementURL)
+				l.w.Write(l.path)
 				l.w.Write(f[1:])
 
 			} else {
@@ -252,9 +252,11 @@
 }
 
 func doReplace(ct contentTransformer, matchers []absURLMatcher) {
+
 	lexer := &absurllexer{
 		content:  ct.Content(),
 		w:        ct,
+		path:     ct.Path(),
 		matchers: matchers}
 
 	lexer.replace()
@@ -265,9 +267,7 @@
 	xmlMatchers  []absURLMatcher
 }
 
-func newAbsURLReplacer(baseURL string) *absURLReplacer {
-	u, _ := url.Parse(baseURL)
-	base := []byte(strings.TrimRight(u.String(), "/") + "/")
+func newAbsURLReplacer() *absURLReplacer {
 
 	// HTML
 	dqHTMLMatch := []byte("\"/")
@@ -285,14 +285,13 @@
 
 	return &absURLReplacer{
 		htmlMatchers: []absURLMatcher{
-			{dqHTMLMatch, dqHTML, base},
-			{sqHTMLMatch, sqHTML, base},
+			{dqHTMLMatch, dqHTML},
+			{sqHTMLMatch, sqHTML},
 		},
 		xmlMatchers: []absURLMatcher{
-			{dqXMLMatch, dqXML, base},
-			{sqXMLMatch, sqXML, base},
+			{dqXMLMatch, dqXML},
+			{sqXMLMatch, sqXML},
 		}}
-
 }
 
 func (au *absURLReplacer) replaceInHTML(ct contentTransformer) {
--- a/transform/chain.go
+++ b/transform/chain.go
@@ -23,6 +23,7 @@
 // contentTransformer is an interface that enables rotation  of pooled buffers
 // in the transformer chain.
 type contentTransformer interface {
+	Path() []byte
 	Content() []byte
 	io.Writer
 }
@@ -30,10 +31,15 @@
 // Implements contentTransformer
 // Content is read from the from-buffer and rewritten to to the to-buffer.
 type fromToBuffer struct {
+	path []byte
 	from *bytes.Buffer
 	to   *bytes.Buffer
 }
 
+func (ft fromToBuffer) Path() []byte {
+	return ft.path
+}
+
 func (ft fromToBuffer) Write(p []byte) (n int, err error) {
 	return ft.to.Write(p)
 }
@@ -42,7 +48,7 @@
 	return ft.from.Bytes()
 }
 
-func (c *chain) Apply(w io.Writer, r io.Reader) error {
+func (c *chain) Apply(w io.Writer, r io.Reader, p []byte) error {
 
 	b1 := bp.GetBuffer()
 	defer bp.PutBuffer(b1)
@@ -57,7 +63,7 @@
 	b2 := bp.GetBuffer()
 	defer bp.PutBuffer(b2)
 
-	fb := &fromToBuffer{from: b1, to: b2}
+	fb := &fromToBuffer{path: p, from: b1, to: b2}
 
 	for i, tr := range *c {
 		if i > 0 {
--- a/transform/chain_test.go
+++ b/transform/chain_test.go
@@ -3,6 +3,7 @@
 import (
 	"bytes"
 	"github.com/spf13/hugo/helpers"
+	"path/filepath"
 	"strings"
 	"testing"
 )
@@ -62,6 +63,11 @@
 schemaless2: &lt;img srcset=&quot;//img.jpg&quot; src=&quot;//basic.jpg2&gt; POST
 `
 
+const REL_PATH_VARIATIONS = `PRE. a href="/img/small.jpg" POST.`
+const REL_PATH_VARIATIONS_CORRECT = `PRE. a href="../../img/small.jpg" POST.`
+
+const testBaseURL = "http://base/"
+
 var abs_url_bench_tests = []test{
 	{H5_JS_CONTENT_DOUBLE_QUOTE, CORRECT_OUTPUT_SRC_HREF_DQ},
 	{H5_JS_CONTENT_SINGLE_QUOTE, CORRECT_OUTPUT_SRC_HREF_SQ},
@@ -85,11 +91,13 @@
 	{SRCSET_XML_SINGLE_QUOTE, SRCSET_XML_SINGLE_QUOTE_CORRECT},
 	{SRCSET_XML_VARIATIONS, SRCSET_XML_VARIATIONS_CORRECT}}
 
+var relurl_tests = []test{{REL_PATH_VARIATIONS, REL_PATH_VARIATIONS_CORRECT}}
+
 func TestChainZeroTransformers(t *testing.T) {
 	tr := NewChain()
 	in := new(bytes.Buffer)
 	out := new(bytes.Buffer)
-	if err := tr.Apply(in, out); err != nil {
+	if err := tr.Apply(in, out, []byte("")); err != nil {
 		t.Errorf("A zero transformer chain returned an error.")
 	}
 }
@@ -112,7 +120,7 @@
 	tr := NewChain(f1, f2, f3, f4)
 
 	out := new(bytes.Buffer)
-	if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End.")); err != nil {
+	if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End."), []byte("")); err != nil {
 		t.Errorf("Multi transformer chain returned an error: %s", err)
 	}
 
@@ -124,8 +132,7 @@
 }
 
 func BenchmarkAbsURL(b *testing.B) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -134,8 +141,7 @@
 }
 
 func BenchmarkAbsURLSrcset(b *testing.B) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -144,8 +150,7 @@
 }
 
 func BenchmarkXMLAbsURLSrcset(b *testing.B) {
-	absXMLURL, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absXMLURL...)
+	tr := NewChain(AbsURLInXML)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -154,31 +159,33 @@
 }
 
 func TestAbsURL(t *testing.T) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	apply(t.Errorf, tr, abs_url_tests)
 
 }
 
+func TestRelativeURL(t *testing.T) {
+	tr := NewChain(AbsURL)
+
+	applyWithPath(t.Errorf, tr, relurl_tests, helpers.GetDottedRelativePath(filepath.FromSlash("/post/sub/")))
+
+}
+
 func TestAbsURLSrcSet(t *testing.T) {
-	absURL, _ := absURLFromURL("http://base")
-	tr := NewChain(absURL...)
+	tr := NewChain(AbsURL)
 
 	apply(t.Errorf, tr, srcset_tests)
 }
 
 func TestAbsXMLURLSrcSet(t *testing.T) {
-	absURLInXML, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absURLInXML...)
+	tr := NewChain(AbsURLInXML)
 
 	apply(t.Errorf, tr, srcset_xml_tests)
-
 }
 
 func BenchmarkXMLAbsURL(b *testing.B) {
-	absURLInXML, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absURLInXML...)
+	tr := NewChain(AbsURLInXML)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -187,17 +194,17 @@
 }
 
 func TestXMLAbsURL(t *testing.T) {
-	absURLInXML, _ := absURLInXMLFromURL("http://base")
-	tr := NewChain(absURLInXML...)
+	tr := NewChain(AbsURLInXML)
 	apply(t.Errorf, tr, xml_abs_url_tests)
 }
 
 type errorf func(string, ...interface{})
 
-func apply(ef errorf, tr chain, tests []test) {
+func applyWithPath(ef errorf, tr chain, tests []test, path string) {
 	for _, test := range tests {
 		out := new(bytes.Buffer)
-		err := tr.Apply(out, strings.NewReader(test.content))
+		var err error
+		err = tr.Apply(out, strings.NewReader(test.content), []byte(path))
 		if err != nil {
 			ef("Unexpected error: %s", err)
 		}
@@ -205,6 +212,10 @@
 			ef("Expected:\n%s\nGot:\n%s", test.expected, string(out.Bytes()))
 		}
 	}
+}
+
+func apply(ef errorf, tr chain, tests []test) {
+	applyWithPath(ef, tr, tests, testBaseURL)
 }
 
 type test struct {