shithub: hugo

ref: ffaec4ca8c4c6fd05b195879ccd65acf2fd5a6ac
parent: 91fb8f1b59cce50de914d66dac1d406655c3c43b
author: Bjørn Erik Pedersen <[email protected]>
date: Thu Mar 15 05:37:30 EDT 2018

Add a way to merge pages by language

As an example:

```html
{{ $pages := .Site.RegularPages | lang.Merge $frSite.RegularPages | lang.Merge $enSite.RegularPages }}
```

Will "fill in the gaps" in the current site with, from left to right, content from the French site, and lastly the English.

Fixes #4463
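
For reference, here is a minimal, self-contained Go sketch of that merge rule, using a simplified `page` struct with a plain string standing in for `TranslationKey()` (the actual implementation is in `hugolib/pages_language_merge.go` below; it additionally sorts the merged list with the default page sort and caches the result keyed on both input lists):

```go
package main

import "fmt"

// page is a simplified stand-in for *hugolib.Page; key plays the role
// of Page.TranslationKey().
type page struct {
	key  string
	lang string
}

// mergeByLanguage keeps every page in primary and fills the gaps with
// pages from fallback whose translation key is not already present.
func mergeByLanguage(primary, fallback []page) []page {
	seen := make(map[string]bool)
	for _, p := range primary {
		seen[p.key] = true
	}
	merged := append([]page(nil), primary...)
	for _, p := range fallback {
		if !seen[p.key] {
			merged = append(merged, p)
		}
	}
	return merged
}

func main() {
	nn := []page{{"p2", "nn"}, {"p3", "nn"}}
	en := []page{{"p1", "en"}, {"p2", "en"}, {"p3", "en"}, {"p4", "en"}}
	for _, p := range mergeByLanguage(nn, en) {
		fmt.Println(p.key, p.lang) // p2, p3 stay "nn"; p1, p4 are filled in from "en"
	}
}
```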

--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -15,9 +15,12 @@
 
 import (
 	"bytes"
+	"fmt"
 
 	"errors"
 
+	jww "github.com/spf13/jwalterweatherman"
+
 	"github.com/fsnotify/fsnotify"
 	"github.com/gohugoio/hugo/helpers"
 )
@@ -69,6 +72,11 @@
 		h.Log.FEEDBACK.Printf("\nTemplate Metrics:\n\n")
 		h.Log.FEEDBACK.Print(b.String())
 		h.Log.FEEDBACK.Println()
+	}
+
+	errorCount := h.Log.LogCountForLevel(jww.LevelError)
+	if errorCount > 0 {
+		return fmt.Errorf("logged %d error(s)", errorCount)
 	}
 
 	return nil
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -1069,6 +1069,7 @@
 
 	mf := siteConfig.Fs
 
+	// TODO(bep) cleanup/remove duplication, use the new testBuilder in testhelpers_test
 	// Add some layouts
 	if err := afero.WriteFile(mf,
 		filepath.Join("layouts", "_default/single.html"),
@@ -1368,7 +1369,7 @@
 }
 
 func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
-	filename = filepath.FromSlash(filename)
+	filename = filepath.Clean(filename)
 	b, err := afero.ReadFile(fs, filename)
 	if err != nil {
 		// Print some debug info
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -263,6 +263,16 @@
 	targetPathDescriptorPrototype *targetPathDescriptor
 }
 
+// Sites is a convenience method to get all the Hugo sites/languages configured.
+func (p *Page) Sites() SiteInfos {
+	infos := make(SiteInfos, len(p.s.owner.Sites))
+	for i, site := range p.s.owner.Sites {
+		infos[i] = &site.Info
+	}
+
+	return infos
+}
+
 // SearchKeywords implements the related.Document interface needed for fast page searches.
 func (p *Page) SearchKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
 
--- a/hugolib/pageCache.go
+++ b/hugolib/pageCache.go
@@ -17,31 +17,59 @@
 	"sync"
 )
 
+type pageCacheEntry struct {
+	in  []Pages
+	out Pages
+}
+
+func (entry pageCacheEntry) matches(pageLists []Pages) bool {
+	if len(entry.in) != len(pageLists) {
+		return false
+	}
+	for i, p := range pageLists {
+		if !fastEqualPages(p, entry.in[i]) {
+			return false
+		}
+	}
+
+	return true
+}
+
 type pageCache struct {
 	sync.RWMutex
-	m map[string][][2]Pages
+	m map[string][]pageCacheEntry
 }
 
 func newPageCache() *pageCache {
-	return &pageCache{m: make(map[string][][2]Pages)}
+	return &pageCache{m: make(map[string][]pageCacheEntry)}
 }
 
-// get gets a Pages slice from the cache matching the given key and Pages slice.
-// If none found in cache, a copy of the supplied slice is created.
+// get/getP gets a Pages slice from the cache matching the given key and
+// all the provided Pages slices.
+// If none found in cache, a copy of the first slice is created.
 //
 // If an apply func is provided, that func is applied to the newly created copy.
 //
+// The getP variant's apply func takes a pointer to Pages.
+//
 // The cache and the execution of the apply func is protected by a RWMutex.
-func (c *pageCache) get(key string, p Pages, apply func(p Pages)) (Pages, bool) {
+func (c *pageCache) get(key string, apply func(p Pages), pageLists ...Pages) (Pages, bool) {
+	return c.getP(key, func(p *Pages) {
+		if apply != nil {
+			apply(*p)
+		}
+	}, pageLists...)
+}
+
+func (c *pageCache) getP(key string, apply func(p *Pages), pageLists ...Pages) (Pages, bool) {
 	c.RLock()
 	if cached, ok := c.m[key]; ok {
-		for _, ps := range cached {
-			if fastEqualPages(p, ps[0]) {
+		for _, entry := range cached {
+			if entry.matches(pageLists) {
 				c.RUnlock()
-				return ps[1], true
+				return entry.out, true
 			}
 		}
-
 	}
 	c.RUnlock()
 
@@ -50,23 +78,25 @@
 
 	// double-check
 	if cached, ok := c.m[key]; ok {
-		for _, ps := range cached {
-			if fastEqualPages(p, ps[0]) {
-				return ps[1], true
+		for _, entry := range cached {
+			if entry.matches(pageLists) {
+				return entry.out, true
 			}
 		}
 	}
 
+	p := pageLists[0]
 	pagesCopy := append(Pages(nil), p...)
 
 	if apply != nil {
-		apply(pagesCopy)
+		apply(&pagesCopy)
 	}
 
+	entry := pageCacheEntry{in: pageLists, out: pagesCopy}
 	if v, ok := c.m[key]; ok {
-		c.m[key] = append(v, [2]Pages{p, pagesCopy})
+		c.m[key] = append(v, entry)
 	} else {
-		c.m[key] = [][2]Pages{{p, pagesCopy}}
+		c.m[key] = []pageCacheEntry{entry}
 	}
 
 	return pagesCopy, false
--- a/hugolib/pageCache_test.go
+++ b/hugolib/pageCache_test.go
@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+	"strconv"
 	"sync"
 	"sync/atomic"
 	"testing"
@@ -51,10 +52,10 @@
 			defer wg.Done()
 			for k, pages := range testPageSets {
 				l1.Lock()
-				p, c := c1.get("k1", pages, nil)
+				p, c := c1.get("k1", nil, pages)
 				assert.Equal(t, !atomic.CompareAndSwapUint64(&o1, uint64(k), uint64(k+1)), c)
 				l1.Unlock()
-				p2, c2 := c1.get("k1", p, nil)
+				p2, c2 := c1.get("k1", nil, p)
 				assert.True(t, c2)
 				assert.True(t, fastEqualPages(p, p2))
 				assert.True(t, fastEqualPages(p, pages))
@@ -61,7 +62,7 @@
 				assert.NotNil(t, p)
 
 				l2.Lock()
-				p3, c3 := c1.get("k2", pages, changeFirst)
+				p3, c3 := c1.get("k2", changeFirst, pages)
 				assert.Equal(t, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)), c3)
 				l2.Unlock()
 				assert.NotNil(t, p3)
@@ -70,4 +71,18 @@
 		}()
 	}
 	wg.Wait()
+}
+
+func BenchmarkPageCache(b *testing.B) {
+	cache := newPageCache()
+	pages := make(Pages, 30)
+	for i := 0; i < 30; i++ {
+		pages[i] = &Page{title: "p" + strconv.Itoa(i)}
+	}
+	key := "key"
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		cache.getP(key, nil, pages)
+	}
 }
--- a/hugolib/pageSort.go
+++ b/hugolib/pageSort.go
@@ -14,8 +14,9 @@
 package hugolib
 
 import (
-	"github.com/spf13/cast"
 	"sort"
+
+	"github.com/spf13/cast"
 )
 
 var spc = newPageCache()
@@ -115,7 +116,7 @@
 // This may safely be executed  in parallel.
 func (p Pages) ByWeight() Pages {
 	key := "pageSort.ByWeight"
-	pages, _ := spc.get(key, p, pageBy(defaultPageSort).Sort)
+	pages, _ := spc.get(key, pageBy(defaultPageSort).Sort, p)
 	return pages
 }
 
@@ -132,7 +133,7 @@
 		return p1.title < p2.title
 	}
 
-	pages, _ := spc.get(key, p, pageBy(title).Sort)
+	pages, _ := spc.get(key, pageBy(title).Sort, p)
 	return pages
 }
 
@@ -149,7 +150,7 @@
 		return p1.linkTitle < p2.linkTitle
 	}
 
-	pages, _ := spc.get(key, p, pageBy(linkTitle).Sort)
+	pages, _ := spc.get(key, pageBy(linkTitle).Sort, p)
 
 	return pages
 }
@@ -167,7 +168,7 @@
 		return p1.Date.Unix() < p2.Date.Unix()
 	}
 
-	pages, _ := spc.get(key, p, pageBy(date).Sort)
+	pages, _ := spc.get(key, pageBy(date).Sort, p)
 
 	return pages
 }
@@ -185,7 +186,7 @@
 		return p1.PublishDate.Unix() < p2.PublishDate.Unix()
 	}
 
-	pages, _ := spc.get(key, p, pageBy(pubDate).Sort)
+	pages, _ := spc.get(key, pageBy(pubDate).Sort, p)
 
 	return pages
 }
@@ -203,7 +204,7 @@
 		return p1.ExpiryDate.Unix() < p2.ExpiryDate.Unix()
 	}
 
-	pages, _ := spc.get(key, p, pageBy(expDate).Sort)
+	pages, _ := spc.get(key, pageBy(expDate).Sort, p)
 
 	return pages
 }
@@ -221,7 +222,7 @@
 		return p1.Lastmod.Unix() < p2.Lastmod.Unix()
 	}
 
-	pages, _ := spc.get(key, p, pageBy(date).Sort)
+	pages, _ := spc.get(key, pageBy(date).Sort, p)
 
 	return pages
 }
@@ -239,7 +240,7 @@
 		return len(p1.Content) < len(p2.Content)
 	}
 
-	pages, _ := spc.get(key, p, pageBy(length).Sort)
+	pages, _ := spc.get(key, pageBy(length).Sort, p)
 
 	return pages
 }
@@ -253,7 +254,7 @@
 
 	key := "pageSort.ByLanguage"
 
-	pages, _ := spc.get(key, p, pageBy(languagePageSort).Sort)
+	pages, _ := spc.get(key, pageBy(languagePageSort).Sort, p)
 
 	return pages
 }
@@ -272,7 +273,7 @@
 		}
 	}
 
-	pages, _ := spc.get(key, p, reverseFunc)
+	pages, _ := spc.get(key, reverseFunc, p)
 
 	return pages
 }
@@ -297,7 +298,7 @@
 		return s1 < s2
 	}
 
-	pages, _ := spc.get(key, p, pageBy(paramsKeyComparator).Sort)
+	pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p)
 
 	return pages
 }
--- /dev/null
+++ b/hugolib/pages_language_merge.go
@@ -1,0 +1,61 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+)
+
+var (
+	_ pagesLanguageMerger = (*Pages)(nil)
+)
+
+type pagesLanguageMerger interface {
+	MergeByLanguage(other Pages) Pages
+	// Needed for integration with the tpl package.
+	MergeByLanguageInterface(other interface{}) (interface{}, error)
+}
+
+// MergeByLanguage supplies missing translations in p1 with values from p2.
+// The result is sorted by the default sort order for pages.
+func (p1 Pages) MergeByLanguage(p2 Pages) Pages {
+	merge := func(pages *Pages) {
+		m := make(map[string]bool)
+		for _, p := range *pages {
+			m[p.TranslationKey()] = true
+		}
+
+		for _, p := range p2 {
+			if _, found := m[p.TranslationKey()]; !found {
+				*pages = append(*pages, p)
+			}
+		}
+
+		pages.Sort()
+	}
+
+	out, _ := spc.getP("pages.MergeByLanguage", merge, p1, p2)
+
+	return out
+}
+
+// MergeByLanguageInterface is the generic version of MergeByLanguage. It
+// is here just so it can be called from the tpl package.
+func (p1 Pages) MergeByLanguageInterface(in interface{}) (interface{}, error) {
+	p2, ok := in.(Pages)
+	if !ok {
+		return nil, fmt.Errorf("%T cannot be merged by language", in)
+	}
+	return p1.MergeByLanguage(p2), nil
+}
--- /dev/null
+++ b/hugolib/pages_language_merge_test.go
@@ -1,0 +1,150 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestMergeLanguages(t *testing.T) {
+	t.Parallel()
+	assert := require.New(t)
+
+	b := newTestSiteForLanguageMerge(t, 30)
+	b.CreateSites()
+
+	b.Build(BuildCfg{SkipRender: true})
+
+	h := b.H
+
+	enSite := h.Sites[0]
+	frSite := h.Sites[1]
+	nnSite := h.Sites[2]
+
+	assert.Equal(30, len(enSite.RegularPages))
+	assert.Equal(6, len(frSite.RegularPages))
+	assert.Equal(11, len(nnSite.RegularPages))
+
+	for i := 0; i < 2; i++ {
+		mergedNN := nnSite.RegularPages.MergeByLanguage(enSite.RegularPages)
+		assert.Equal(30, len(mergedNN))
+		for i := 1; i <= 30; i++ {
+			expectedLang := "en"
+			if i == 2 || i%3 == 0 {
+				expectedLang = "nn"
+			}
+			p := mergedNN[i-1]
+			assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i))
+		}
+	}
+
+	mergedFR := frSite.RegularPages.MergeByLanguage(enSite.RegularPages)
+	assert.Equal(30, len(mergedFR))
+	for i := 1; i <= 30; i++ {
+		expectedLang := "en"
+		if i%5 == 0 {
+			expectedLang = "fr"
+		}
+		p := mergedFR[i-1]
+		assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i))
+	}
+
+	firstNN := nnSite.RegularPages[0]
+	assert.Equal(4, len(firstNN.Sites()))
+	assert.Equal("en", firstNN.Sites().First().Language.Lang)
+
+}
+
+func TestMergeLanguagesTemplate(t *testing.T) {
+	t.Parallel()
+
+	b := newTestSiteForLanguageMerge(t, 15)
+	b.WithTemplates("home.html", `
+{{ $pages := .Site.RegularPages }}
+{{ .Scratch.Set "pages" $pages }}
+{{ if eq .Lang "nn" }}:
+{{ $enSite := index .Sites 0 }}
+{{ $frSite := index .Sites 1 }}
+{{ .Scratch.Set "pages" ($pages | lang.Merge $frSite.RegularPages| lang.Merge $enSite.RegularPages) }}
+{{ end }}
+{{ $pages := .Scratch.Get "pages" }}
+{{ range $i, $p := $pages }}{{ add $i 1 }}: {{ .Path }} {{ .Lang }} | {{ end }}
+`,
+		"shortcodes/shortcode.html", "MyShort",
+		"shortcodes/lingo.html", "MyLingo",
+	)
+
+	b.CreateSites()
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/nn/index.html", "p1.md en | 2: p2.nn.md nn | 3: p3.nn.md nn | 4: p4.md en | 5: p5.fr.md fr | 6: p6.nn.md nn | 7: p7.md en | 8: p8.md en | 9: p9.nn.md nn | 10: p10.fr.md fr | 11: p11.md en | 12: p12.nn.md nn | 13: p13.md en | 14: p14.md en | 15: p15.nn.md nn")
+}
+
+func newTestSiteForLanguageMerge(t testing.TB, count int) *sitesBuilder {
+	contentTemplate := `---
+title: doc%d
+weight: %d
+date: "2018-02-28"
+---
+# doc
+*some "content"*
+
+{{< shortcode >}}
+
+{{< lingo >}}
+`
+
+	builder := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
+
+	// We need some content with some missing translations.
+	// "en" is the main language, so add some English content + some Norwegian (nn, nynorsk) content.
+	var contentPairs []string
+	for i := 1; i <= count; i++ {
+		content := fmt.Sprintf(contentTemplate, i, i)
+		contentPairs = append(contentPairs, []string{fmt.Sprintf("p%d.md", i), content}...)
+		if i == 2 || i%3 == 0 {
+			// Add pages 2, 3, 6, 9 ... to both languages
+			contentPairs = append(contentPairs, []string{fmt.Sprintf("p%d.nn.md", i), content}...)
+		}
+		if i%5 == 0 {
+			// Add some French content, too.
+			contentPairs = append(contentPairs, []string{fmt.Sprintf("p%d.fr.md", i), content}...)
+		}
+	}
+
+	builder.WithContent(contentPairs...)
+	return builder
+}
+
+func BenchmarkMergeByLanguage(b *testing.B) {
+	const count = 100
+
+	builder := newTestSiteForLanguageMerge(b, count)
+	builder.CreateSites()
+	builder.Build(BuildCfg{SkipRender: true})
+	h := builder.H
+
+	enSite := h.Sites[0]
+	nnSite := h.Sites[2]
+
+	for i := 0; i < b.N; i++ {
+		merged := nnSite.RegularPages.MergeByLanguage(enSite.RegularPages)
+		if len(merged) != count {
+			b.Fatal("Count mismatch")
+		}
+	}
+}
--- a/hugolib/shortcode_test.go
+++ b/hugolib/shortcode_test.go
@@ -673,7 +673,8 @@
 	writeSource(t, fs, "content/sect/mycsvpage.md", fmt.Sprintf(pageTemplateCSVOnly, "Single CSV"))
 	writeSource(t, fs, "content/sect/notfound.md", fmt.Sprintf(pageTemplateShortcodeNotFound, "Single CSV"))
 
-	require.NoError(t, h.Build(BuildCfg{}))
+	err := h.Build(BuildCfg{})
+	require.Equal(t, "logged 1 error(s)", err.Error())
 	require.Len(t, h.Sites, 1)
 
 	s := h.Sites[0]
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -348,6 +348,16 @@
 	return s, nil
 }
 
+type SiteInfos []*SiteInfo
+
+// First is a convenience method to get the first Site, i.e. the main language.
+func (s SiteInfos) First() *SiteInfo {
+	if len(s) == 0 {
+		return nil
+	}
+	return s[0]
+}
+
 type SiteInfo struct {
 	Taxonomies TaxonomyList
 	Authors    AuthorList
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -20,7 +20,6 @@
 	"testing"
 
 	"github.com/markbates/inflect"
-	jww "github.com/spf13/jwalterweatherman"
 
 	"github.com/gohugoio/hugo/helpers"
 
@@ -54,16 +53,8 @@
 
 	withTemplate := createWithTemplateFromNameValues("missing", templateMissingFunc)
 
-	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate}, BuildCfg{})
+	buildSingleSiteExpected(t, true, deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate}, BuildCfg{})
 
-	errCount := s.Log.LogCountForLevelsGreaterThanorEqualTo(jww.LevelError)
-
-	// TODO(bep) clean up the template error handling
-	// The template errors are stored in a slice etc. so we get 4 log entries
-	// When we should get only 1
-	if errCount == 0 {
-		t.Fatalf("Expecting the template to log 1 ERROR, got %d", errCount)
-	}
 }
 
 func TestDraftAndFutureRender(t *testing.T) {
--- a/hugolib/testhelpers_test.go
+++ b/hugolib/testhelpers_test.go
@@ -4,11 +4,12 @@
 	"path/filepath"
 	"testing"
 
-	"regexp"
-
 	"fmt"
+	"regexp"
 	"strings"
 
+	jww "github.com/spf13/jwalterweatherman"
+
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/deps"
 	"github.com/spf13/afero"
@@ -23,9 +24,243 @@
 	"log"
 
 	"github.com/gohugoio/hugo/hugofs"
-	jww "github.com/spf13/jwalterweatherman"
 	"github.com/stretchr/testify/require"
 )
+
+const ()
+
+type sitesBuilder struct {
+	Cfg config.Provider
+	Fs  *hugofs.Fs
+	T   testing.TB
+
+	H *HugoSites
+
+	// We will add some default if not set.
+	templatesAdded bool
+	i18nAdded      bool
+	dataAdded      bool
+	contentAdded   bool
+}
+
+func newTestSitesBuilder(t testing.TB) *sitesBuilder {
+	v := viper.New()
+	fs := hugofs.NewMem(v)
+
+	return &sitesBuilder{T: t, Fs: fs}
+}
+
+func (s *sitesBuilder) WithTOMLConfig(conf string) *sitesBuilder {
+	writeSource(s.T, s.Fs, "config.toml", conf)
+	return s
+}
+
+func (s *sitesBuilder) WithDefaultMultiSiteConfig() *sitesBuilder {
+	var defaultMultiSiteConfig = `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+disablePathToLower = true
+defaultContentLanguage = "en"
+defaultContentLanguageInSubdir = true
+
+[permalinks]
+other = "/somewhere/else/:filename"
+
+[blackfriday]
+angledQuotes = true
+
+[Taxonomies]
+tag = "tags"
+
+[Languages]
+[Languages.en]
+weight = 10
+title = "In English"
+languageName = "English"
+[Languages.en.blackfriday]
+angledQuotes = false
+[[Languages.en.menu.main]]
+url    = "/"
+name   = "Home"
+weight = 0
+
+[Languages.fr]
+weight = 20
+title = "Le Français"
+languageName = "Français"
+[Languages.fr.Taxonomies]
+plaque = "plaques"
+
+[Languages.nn]
+weight = 30
+title = "På nynorsk"
+languageName = "Nynorsk"
+paginatePath = "side"
+[Languages.nn.Taxonomies]
+lag = "lag"
+[[Languages.nn.menu.main]]
+url    = "/"
+name   = "Heim"
+weight = 1
+
+[Languages.nb]
+weight = 40
+title = "På bokmål"
+languageName = "Bokmål"
+paginatePath = "side"
+[Languages.nb.Taxonomies]
+lag = "lag"
+`
+
+	return s.WithTOMLConfig(defaultMultiSiteConfig)
+
+}
+
+func (s *sitesBuilder) WithContent(filenameContent ...string) *sitesBuilder {
+	s.contentAdded = true
+	for i := 0; i < len(filenameContent); i += 2 {
+		filename, content := filenameContent[i], filenameContent[i+1]
+		writeSource(s.T, s.Fs, filepath.Join("content", filename), content)
+	}
+	return s
+}
+
+func (s *sitesBuilder) WithTemplates(filenameContent ...string) *sitesBuilder {
+	s.templatesAdded = true
+	for i := 0; i < len(filenameContent); i += 2 {
+		filename, content := filenameContent[i], filenameContent[i+1]
+		writeSource(s.T, s.Fs, filepath.Join("layouts", filename), content)
+	}
+	return s
+}
+
+func (s *sitesBuilder) CreateSites() *sitesBuilder {
+	if !s.templatesAdded {
+		s.addDefaultTemplates()
+	}
+	if !s.i18nAdded {
+		s.addDefaultI18n()
+	}
+	if !s.dataAdded {
+		s.addDefaultData()
+	}
+	if !s.contentAdded {
+		s.addDefaultContent()
+	}
+
+	if s.Cfg == nil {
+		cfg, err := LoadConfig(s.Fs.Source, "", "config.toml")
+		if err != nil {
+			s.T.Fatalf("Failed to load config: %s", err)
+		}
+		s.Cfg = cfg
+	}
+
+	sites, err := NewHugoSites(deps.DepsCfg{Fs: s.Fs, Cfg: s.Cfg})
+	if err != nil {
+		s.T.Fatalf("Failed to create sites: %s", err)
+	}
+	s.H = sites
+
+	return s
+}
+
+func (s *sitesBuilder) Build(cfg BuildCfg) *sitesBuilder {
+	if s.H == nil {
+		s.T.Fatal("Need to run builder.CreateSites first")
+	}
+	err := s.H.Build(cfg)
+	if err != nil {
+		s.T.Fatalf("Build failed: %s", err)
+	}
+
+	return s
+}
+
+func (s *sitesBuilder) addDefaultTemplates() {
+	fs := s.Fs
+	t := s.T
+
+	// Layouts
+
+	writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "Single: {{ .Title }}|{{ i18n \"hello\" }}|{{.Lang}}|{{ .Content }}")
+	writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "{{ $p := .Paginator }}List Page {{ $p.PageNumber }}: {{ .Title }}|{{ i18n \"hello\" }}|{{ .Permalink }}|Pager: {{ template \"_internal/pagination.html\" . }}")
+	writeSource(t, fs, filepath.Join("layouts", "index.html"), "{{ $p := .Paginator }}Default Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{  .Site.Data.hugo.slogan }}")
+	writeSource(t, fs, filepath.Join("layouts", "index.fr.html"), "{{ $p := .Paginator }}French Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{  .Site.Data.hugo.slogan }}")
+
+	// Shortcodes
+	writeSource(t, fs, filepath.Join("layouts", "shortcodes", "shortcode.html"), "Shortcode: {{ i18n \"hello\" }}")
+	// A shortcode in multiple languages
+	writeSource(t, fs, filepath.Join("layouts", "shortcodes", "lingo.html"), "LingoDefault")
+	writeSource(t, fs, filepath.Join("layouts", "shortcodes", "lingo.fr.html"), "LingoFrench")
+}
+
+func (s *sitesBuilder) addDefaultI18n() {
+	fs := s.Fs
+	t := s.T
+
+	writeSource(t, fs, filepath.Join("i18n", "en.yaml"), `
+hello:
+  other: "Hello"
+`)
+	writeSource(t, fs, filepath.Join("i18n", "fr.yaml"), `
+hello:
+  other: "Bonjour"
+`)
+
+}
+
+func (s *sitesBuilder) addDefaultData() {
+	fs := s.Fs
+	t := s.T
+
+	writeSource(t, fs, filepath.FromSlash("data/hugo.toml"), "slogan = \"Hugo Rocks!\"")
+}
+
+func (s *sitesBuilder) addDefaultContent() {
+	fs := s.Fs
+	t := s.T
+
+	contentTemplate := `---
+title: doc1
+weight: 1
+tags:
+ - tag1
+date: "2018-02-28"
+---
+# doc1
+*some "content"*
+
+{{< shortcode >}}
+
+{{< lingo >}}
+`
+
+	writeSource(t, fs, filepath.FromSlash("content/sect/doc1.en.md"), contentTemplate)
+	writeSource(t, fs, filepath.FromSlash("content/sect/doc1.fr.md"), contentTemplate)
+	writeSource(t, fs, filepath.FromSlash("content/sect/doc1.nb.md"), contentTemplate)
+	writeSource(t, fs, filepath.FromSlash("content/sect/doc1.nn.md"), contentTemplate)
+}
+
+func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) {
+	content := readDestination(s.T, s.Fs, filename)
+	for _, match := range matches {
+		if !strings.Contains(content, match) {
+			s.T.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
+		}
+	}
+}
+
+func (s *sitesBuilder) AssertFileContentRe(filename string, matches ...string) {
+	content := readDestination(s.T, s.Fs, filename)
+	for _, match := range matches {
+		r := regexp.MustCompile(match)
+		if !r.MatchString(content) {
+			s.T.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
+		}
+	}
+}
 
 type testHelper struct {
 	Cfg config.Provider
--- a/tpl/lang/lang.go
+++ b/tpl/lang/lang.go
@@ -15,6 +15,7 @@
 
 import (
 	"errors"
+	"fmt"
 	"math"
 	"strconv"
 	"strings"
@@ -133,4 +134,16 @@
 	}
 
 	return string(b), nil
+}
+
+type pagesLanguageMerger interface {
+	MergeByLanguageInterface(other interface{}) (interface{}, error)
+}
+
+// Merge merges the two page lists by language, filling in untranslated
+// entries in p1 with pages from p2.
+func (ns *Namespace) Merge(p2, p1 interface{}) (interface{}, error) {
+	merger, ok := p1.(pagesLanguageMerger)
+	if !ok {
+		return nil, fmt.Errorf("language merge not supported for %T", p1)
+	}
+	return merger.MergeByLanguageInterface(p2)
 }