shithub: hugo

Download patch

ref: 5e2d3d2e10a4cf1935c2226b023bde2f3ce9e7c0
parent: 9bf223e584e115569bf2ffe64c3118e54d021a90
author: Cyrill Schumacher <[email protected]>
date: Sat Dec 27 09:40:10 EST 2014

Merge to stay up to date with current dev
Feature: GetJson and GetCsv in shortcodes or other layout files.
For more details please see: http://cyrillschumacher.com/2014/12/21/dynamic-pages-with-gohugo.io/

--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -56,7 +56,7 @@
 
 //Flags that are to be added to commands.
 var BuildWatch, Draft, Future, UglyUrls, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, PluralizeListTitles, NoTimes bool
-var Source, Destination, Theme, BaseUrl, CfgFile, LogFile, Editor string
+var Source, CacheDir, Destination, Theme, BaseUrl, CfgFile, LogFile, Editor string
 
 //Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
 func Execute() {
@@ -83,6 +83,7 @@
 	HugoCmd.PersistentFlags().BoolVar(&DisableRSS, "disableRSS", false, "Do not build RSS files")
 	HugoCmd.PersistentFlags().BoolVar(&DisableSitemap, "disableSitemap", false, "Do not build Sitemap file")
 	HugoCmd.PersistentFlags().StringVarP(&Source, "source", "s", "", "filesystem path to read files relative from")
+	HugoCmd.PersistentFlags().StringVarP(&CacheDir, "cacheDir", "", "$TMPDIR/hugo_cache/", "filesystem path to cache directory")
 	HugoCmd.PersistentFlags().StringVarP(&Destination, "destination", "d", "", "filesystem path to write files to")
 	HugoCmd.PersistentFlags().StringVarP(&Theme, "theme", "t", "", "theme to use (located in /themes/THEMENAME/)")
 	HugoCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output")
@@ -201,6 +202,15 @@
 	} else {
 		dir, _ := os.Getwd()
 		viper.Set("WorkingDir", dir)
+	}
+
+	if CacheDir != "" {
+		if helpers.FilePathSeparator != CacheDir[len(CacheDir)-1:] {
+			CacheDir = CacheDir + helpers.FilePathSeparator
+		}
+		viper.Set("CacheDir", CacheDir)
+	} else {
+		viper.Set("CacheDir", helpers.GetTempDir("hugo_cache", hugofs.SourceFs))
 	}
 
 	if VerboseLog || Logging || (viper.IsSet("LogFile") && viper.GetString("LogFile") != "") {
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -437,3 +437,28 @@
 	_, err = io.Copy(file, r)
 	return
 }
+
+// GetTempDir returns the OS default temp directory with a trailing
+// FilePathSeparator. If subPath is non-empty it is sanitized with
+// MakePath, created below the temp directory if missing, and the
+// combined path — also with a trailing separator — is returned.
+// Panics if the directory cannot be created.
+func GetTempDir(subPath string, fs afero.Fs) string {
+	dir := os.TempDir()
+	if FilePathSeparator != dir[len(dir)-1:] {
+		dir = dir + FilePathSeparator
+	}
+	if subPath != "" {
+		dir = dir + MakePath(subPath)
+
+		// Append the separator before the exists-check so the returned
+		// path is identical whether or not the directory already exists
+		// (previously the early return could skip the separator).
+		if FilePathSeparator != dir[len(dir)-1:] {
+			dir = dir + FilePathSeparator
+		}
+
+		if exists, _ := Exists(dir, fs); exists {
+			return dir
+		}
+
+		if err := fs.MkdirAll(dir, 0777); err != nil { // rwx for user, group and other
+			panic(err)
+		}
+	}
+	return dir
+}
--- a/helpers/path_test.go
+++ b/helpers/path_test.go
@@ -647,3 +647,31 @@
 		reader.Seek(0, 0)
 	}
 }
+
+// TestGetTempDir verifies that GetTempDir prefixes the OS temp dir,
+// sanitizes the sub path via MakePath (spaces collapsed to dashes,
+// forbidden characters dropped) and returns a trailing-separator path.
+func TestGetTempDir(t *testing.T) {
+	dir := os.TempDir()
+	if FilePathSeparator != dir[len(dir)-1:] {
+		dir = dir + FilePathSeparator
+	}
+	testDir := "hugoTestFolder" + FilePathSeparator
+	tests := []struct {
+		input    string
+		expected string
+	}{
+		{"", dir},
+		{testDir + "  Foo bar  ", dir + testDir + "--Foo-bar" + FilePathSeparator},
+		{testDir + "Foo.Bar/foo_Bar-Foo", dir + testDir + "Foo.Bar/foo_Bar-Foo" + FilePathSeparator},
+		{testDir + "fOO,bar:foo%bAR", dir + testDir + "fOObarfoobAR" + FilePathSeparator},
+		{testDir + "FOo/BaR.html", dir + testDir + "FOo/BaR.html" + FilePathSeparator},
+		{testDir + "трям/трям", dir + testDir + "трям/трям" + FilePathSeparator},
+		{testDir + "은행", dir + testDir + "은행" + FilePathSeparator},
+		{testDir + "Банковский кассир", dir + testDir + "Банковский-кассир" + FilePathSeparator},
+	}
+
+	for _, test := range tests {
+		output := GetTempDir(test.input, new(afero.MemMapFs))
+		if output != test.expected {
+			t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+		}
+	}
+}
--- /dev/null
+++ b/tpl/template_resources.go
@@ -1,0 +1,220 @@
+// Copyright © 2013-14 Steve Francia <[email protected]>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tpl
+
+import (
+	"bytes"
+	"encoding/csv"
+	"encoding/json"
+	"errors"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"strings"
+	"sync"
+
+	"github.com/spf13/afero"
+	"github.com/spf13/hugo/helpers"
+	"github.com/spf13/hugo/hugofs"
+	jww "github.com/spf13/jwalterweatherman"
+	"github.com/spf13/viper"
+)
+
+var remoteUrlLock = &remoteLock{m: make(map[string]*sync.Mutex)}
+
+// remoteLock provides one mutex per URL so that a given remote
+// resource is downloaded only once even under concurrent callers.
+type remoteLock struct {
+	sync.RWMutex // guards m
+	m map[string]*sync.Mutex // one download mutex per URL
+}
+
+// UrlLock acquires the download lock for url, creating it on first use.
+func (l *remoteLock) UrlLock(url string) {
+	l.Lock()
+	if _, ok := l.m[url]; !ok {
+		l.m[url] = &sync.Mutex{}
+	}
+	l.Unlock() // release the map lock before blocking on the per-URL mutex; deferring this would hold the map lock for the whole download and stall every other URL
+	l.m[url].Lock()
+}
+
+// UrlUnlock releases the download lock for url after a matching
+// UrlLock. Use only in defer calls.
+func (l *remoteLock) UrlUnlock(url string) {
+	l.RLock()
+	defer l.RUnlock()
+	if um, ok := l.m[url]; ok {
+		um.Unlock()
+	}
+}
+
+// getCacheFileID returns the cache file path for id: the configured
+// CacheDir (expected to end in a separator) plus the query-escaped id.
+func getCacheFileID(id string) string {
+	return viper.GetString("CacheDir") + url.QueryEscape(id)
+}
+
+// resGetCache returns the content for an ID from the file cache or an
+// error. If no cache entry exists it returns (nil, nil).
+func resGetCache(id string, fs afero.Fs) ([]byte, error) {
+	fID := getCacheFileID(id)
+	isExists, err := helpers.Exists(fID, fs)
+	if err != nil {
+		return nil, err
+	}
+	if !isExists {
+		return nil, nil
+	}
+
+	f, err := fs.Open(fID)
+	if err != nil {
+		return nil, err
+	}
+	// Close the handle once read; it was previously leaked.
+	defer f.Close()
+
+	return ioutil.ReadAll(f)
+}
+
+// resWriteCache writes c into the file cache under the given ID.
+func resWriteCache(id string, c []byte, fs afero.Fs) error {
+	fID := getCacheFileID(id)
+	f, err := fs.Create(fID)
+	if err != nil {
+		return err
+	}
+	// Close the handle when done; it was previously leaked.
+	defer f.Close()
+	n, err := f.Write(c)
+	if err != nil {
+		return err
+	}
+	// Guard against a silent short write. The previous n == 0 check
+	// wrongly reported an error for legitimately empty content and
+	// accepted partial writes of longer content.
+	if n < len(c) {
+		return errors.New("Incomplete write to file: " + fID)
+	}
+	return nil
+}
+
+// resGetRemote loads the content of a remote file, writing it through
+// to the file cache after the first successful download. This method is
+// thread safe: concurrent callers for the same url block until the
+// first download has populated the cache.
+func resGetRemote(url string, fs afero.Fs, hc *http.Client) ([]byte, error) {
+
+	// Fast path: serve from the file cache without taking any lock.
+	c, err := resGetCache(url, fs)
+	if c != nil && err == nil {
+		return c, nil
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	// avoid race condition with locks, block other goroutines if the current url is processing
+	remoteUrlLock.UrlLock(url)
+	defer func() { remoteUrlLock.UrlUnlock(url) }()
+
+	// Re-check the cache while holding the lock: another goroutine may
+	// have finished downloading this url while we were blocked above.
+	c, err = resGetCache(url, fs)
+	if c != nil && err == nil {
+		return c, nil
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	jww.INFO.Printf("Downloading: %s ...", url)
+	res, err := hc.Get(url)
+	if err != nil {
+		return nil, err
+	}
+	// NOTE(review): the HTTP status code is never checked, so error
+	// bodies (e.g. a 404 page) would be cached as the resource's
+	// content — confirm this is intended.
+	c, err = ioutil.ReadAll(res.Body)
+	res.Body.Close()
+	if err != nil {
+		return nil, err
+	}
+	err = resWriteCache(url, c, fs)
+	if err != nil {
+		return nil, err
+	}
+	jww.INFO.Printf("... and cached to: %s", getCacheFileID(url))
+	return c, nil
+}
+
+// resGetLocal loads the content of the local file url, resolved
+// relative to the configured WorkingDir when one is set.
+func resGetLocal(url string, fs afero.Fs) ([]byte, error) {
+	p := ""
+	if viper.GetString("WorkingDir") != "" {
+		p = viper.GetString("WorkingDir")
+		if helpers.FilePathSeparator != p[len(p)-1:] {
+			p = p + helpers.FilePathSeparator
+		}
+	}
+	jFile := p + url
+	if e, err := helpers.Exists(jFile, fs); !e {
+		// Missing file: err is nil here, so this yields (nil, nil),
+		// the same miss convention used by resGetCache.
+		return nil, err
+	}
+
+	f, err := fs.Open(jFile)
+	if err != nil {
+		return nil, err
+	}
+	// Close the handle once read; it was previously leaked.
+	defer f.Close()
+	return ioutil.ReadAll(f)
+}
+
+// resGetResource loads the content of a local or remote file: urls
+// containing "://" are fetched remotely (and cached), everything else
+// is read from the source filesystem. An empty url yields (nil, nil).
+func resGetResource(url string) ([]byte, error) {
+	if url == "" {
+		return nil, nil
+	}
+	if strings.Contains(url, "://") {
+		return resGetRemote(url, hugofs.SourceFs, http.DefaultClient)
+	}
+	return resGetLocal(url, hugofs.SourceFs)
+}
+
+// GetJson expects the url to a resource which can either be a local or
+// a remote one. It returns the parsed JSON for use in a shortcode or
+// other layout file, or nil (with the error logged) when fetching or
+// parsing fails.
+func GetJson(url string) interface{} {
+	c, err := resGetResource(url)
+	if err != nil {
+		jww.ERROR.Printf("Failed to get json resource %s with error message %s", url, err)
+		return nil
+	}
+
+	var v interface{}
+	err = json.Unmarshal(c, &v)
+	if err != nil {
+		jww.ERROR.Printf("Cannot read json from resource %s with error message %s", url, err)
+		return nil
+	}
+	return v
+}
+
+// parseCsv parses c as CSV data with sep as the field separator and
+// returns the records, or an error for a bad separator or bad input.
+func parseCsv(c []byte, sep string) ([][]string, error) {
+	rSep := []rune(sep)
+	// Count runes, not bytes: the old len(sep) != 1 check wrongly
+	// rejected a single multi-byte separator rune.
+	if len(rSep) != 1 {
+		return nil, errors.New("Incorrect length of csv separator: " + sep)
+	}
+	b := bytes.NewReader(c)
+	r := csv.NewReader(b)
+	r.Comma = rSep[0]
+	r.FieldsPerRecord = 0 // require every record to match the first record's field count
+	return r.ReadAll()
+}
+
+// GetCsv expects the url to a resource which can either be a local or a
+// remote one, and the data separator, which can be comma, semi-colon,
+// pipe — but only one character. It returns the parsed rows for use in
+// a shortcode, or nil (with the error logged) on failure.
+func GetCsv(url string, sep string) [][]string {
+
+	c, err := resGetResource(url)
+	if err != nil {
+		jww.ERROR.Printf("Failed to get csv resource %s with error message %s", url, err)
+		return nil
+	}
+	d, err := parseCsv(c, sep)
+	if err != nil {
+		jww.ERROR.Printf("Failed to read csv resource %s with error message %s", url, err)
+		return nil
+	}
+	return d
+}
--- /dev/null
+++ b/tpl/template_resources_test.go
@@ -1,0 +1,181 @@
+// Copyright © 2013-14 Steve Francia <[email protected]>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tpl
+
+import (
+	"bytes"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"strings"
+	"testing"
+
+	"github.com/spf13/afero"
+	"github.com/spf13/hugo/helpers"
+)
+
+// TestScpCache exercises the file-cache round trip: a miss must return
+// (nil, nil), and written content must be readable back verbatim.
+func TestScpCache(t *testing.T) {
+
+	tests := []struct {
+		path    string
+		content []byte
+	}{
+		{"http://Foo.Bar/foo_Bar-Foo", []byte(`T€st Content 123`)},
+		{"fOO,bar:foo%bAR", []byte(`T€st Content 123 fOO,bar:foo%bAR`)},
+		{"FOo/BaR.html", []byte(`FOo/BaR.html T€st Content 123`)},
+		{"трям/трям", []byte(`T€st трям/трям Content 123`)},
+		{"은행", []byte(`T€st C은행ontent 123`)},
+		{"Банковский кассир", []byte(`Банковский кассир T€st Content 123`)},
+	}
+
+	fs := new(afero.MemMapFs)
+
+	for _, test := range tests {
+		// Fresh path: must be a miss, not an error.
+		c, err := resGetCache(test.path, fs)
+		if err != nil {
+			t.Errorf("Error getting cache: %s", err)
+		}
+		if c != nil {
+			t.Errorf("There is content where there should not be anything: %s", string(c))
+		}
+
+		err = resWriteCache(test.path, test.content, fs)
+		if err != nil {
+			t.Errorf("Error writing cache: %s", err)
+		}
+
+		// Read back and compare byte-for-byte.
+		c, err = resGetCache(test.path, fs)
+		if err != nil {
+			t.Errorf("Error getting cache after writing: %s", err)
+		}
+		if bytes.Compare(c, test.content) != 0 {
+			t.Errorf("\nExpected: %s\nActual: %s\n", string(test.content), string(c))
+		}
+	}
+}
+
+// TestScpGetLocal writes files to an in-memory filesystem and checks
+// that resGetLocal returns their content unchanged.
+func TestScpGetLocal(t *testing.T) {
+	fs := new(afero.MemMapFs)
+	ps := helpers.FilePathSeparator
+	tests := []struct {
+		path    string
+		content []byte
+	}{
+		{"testpath" + ps + "test.txt", []byte(`T€st Content 123 fOO,bar:foo%bAR`)},
+		{"FOo" + ps + "BaR.html", []byte(`FOo/BaR.html T€st Content 123`)},
+		{"трям" + ps + "трям", []byte(`T€st трям/трям Content 123`)},
+		{"은행", []byte(`T€st C은행ontent 123`)},
+		{"Банковский кассир", []byte(`Банковский кассир T€st Content 123`)},
+	}
+
+	for _, test := range tests {
+		r := bytes.NewReader(test.content)
+		err := helpers.WriteToDisk(test.path, r, fs)
+		if err != nil {
+			t.Error(err)
+		}
+
+		c, err := resGetLocal(test.path, fs)
+		if err != nil {
+			t.Errorf("Error getting resource content: %s", err)
+		}
+		if bytes.Compare(c, test.content) != 0 {
+			t.Errorf("\nExpected: %s\nActual: %s\n", string(test.content), string(c))
+		}
+	}
+
+}
+
+// getTestServer starts an httptest server running handler and returns
+// it together with a client whose proxy routes every request to that
+// server. The caller is responsible for closing the returned server.
+func getTestServer(handler func(w http.ResponseWriter, r *http.Request)) (*httptest.Server, *http.Client) {
+	testServer := httptest.NewServer(http.HandlerFunc(handler))
+	client := &http.Client{
+		Transport: &http.Transport{Proxy: func(*http.Request) (*url.URL, error) { return url.Parse(testServer.URL) }},
+	}
+	return testServer, client
+}
+
+// TestScpGetRemote verifies that resGetRemote downloads content from an
+// HTTP server and writes it through to the file cache.
+func TestScpGetRemote(t *testing.T) {
+	fs := new(afero.MemMapFs)
+
+	tests := []struct {
+		path    string
+		content []byte
+	}{
+		{"http://Foo.Bar/foo_Bar-Foo", []byte(`T€st Content 123`)},
+		{"http://Doppel.Gänger/foo_Bar-Foo", []byte(`T€st Cont€nt 123`)},
+		{"http://Doppel.Gänger/Fizz_Bazz-Foo", []byte(`T€st Банковский кассир Cont€nt 123`)},
+	}
+
+	for _, test := range tests {
+
+		srv, cl := getTestServer(func(w http.ResponseWriter, r *http.Request) {
+			w.Write(test.content)
+		})
+
+		c, err := resGetRemote(test.path, fs, cl)
+		if err != nil {
+			t.Errorf("Error getting resource content: %s", err)
+		}
+		if bytes.Compare(c, test.content) != 0 {
+			t.Errorf("\nNet Expected: %s\nNet Actual: %s\n", string(test.content), string(c))
+		}
+		cc, cErr := resGetCache(test.path, fs)
+		if cErr != nil {
+			t.Error(cErr)
+		}
+		// Report the cached bytes; the old message printed the network
+		// result (c) instead of the cache result (cc).
+		if bytes.Compare(cc, test.content) != 0 {
+			t.Errorf("\nCache Expected: %s\nCache Actual: %s\n", string(test.content), string(cc))
+		}
+		// Close per iteration; the old defer kept every test server
+		// alive until the whole test function returned.
+		srv.Close()
+	}
+}
+
+// TestParseCsv checks parseCsv against invalid separators, records
+// with mismatched field counts, and well-formed input with different
+// separators; exp is the concatenation of every parsed field.
+func TestParseCsv(t *testing.T) {
+
+	tests := []struct {
+		csv []byte
+		sep string
+		exp string
+		err bool
+	}{
+		{[]byte("a,b,c\nd,e,f\n"), "", "", true},
+		{[]byte("a,b,c\nd,e,f\n"), "~/", "", true},
+		{[]byte("a,b,c\nd,e,f"), "|", "a,b,cd,e,f", false},
+		{[]byte("q,w,e\nd,e,f"), ",", "qwedef", false},
+		{[]byte("a|b|c\nd|e|f|g"), "|", "abcdefg", true},
+		{[]byte("z|y|c\nd|e|f"), "|", "zycdef", false},
+	}
+	for _, test := range tests {
+		csv, err := parseCsv(test.csv, test.sep)
+		if test.err && err == nil {
+			t.Error("Expecting an error")
+		}
+		if test.err {
+			continue
+		}
+		if !test.err && err != nil {
+			t.Error(err)
+		}
+
+		// Flatten the parsed rows for a single string comparison.
+		act := ""
+		for _, v := range csv {
+			act = act + strings.Join(v, "")
+		}
+
+		if act != test.exp {
+			t.Errorf("\nExpected: %s\nActual: %s\n%#v\n", test.exp, act, csv)
+		}
+
+	}
+}