shithub: hugo

ref: 60bd332c1f68e49e6ac439047e7c660865189380
parent: 6a30874f19610a38e846e120aac03c68e12f9b7b
author: Bjørn Erik Pedersen <[email protected]>
date: Sat Oct 14 09:40:43 EDT 2017

Only re-render the view(s) you're working on

Hugo already supports partial rebuilds in its server mode. To put it simply: if you change `about.md`, only that content page is read and processed; Hugo then does some further processing (taxonomies etc.) and renders the full site.
This commit covers the rendering part: we now re-render only the pages you are working on, i.e. the last n pages you viewed in the browser (which naturally includes the page from the example above).

To be more specific: when you are running the Hugo server in watch (a.k.a. livereload) mode and change a template or a content file, we do a partial re-render of the following (a simplified sketch follows the list):

* The current content page (if it is a content change)
* The home page
* Up to the last 10 pages you visited on the site.
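
For illustration, a minimal sketch of the render-side filter, with the page type stripped down to the essentials; the filter-by-`RelPermalink` idea and the always-rendered home page mirror the `hugolib/site_render.go` and `commands/hugo.go` hunks below, while the names `page` and `renderPages` here are simplified stand-ins:

```go
package main

import "fmt"

// page is a stripped-down stand-in for hugolib.Page.
type page struct {
	relPermalink string
}

// renderPages re-renders only the pages whose permalink is in the
// recently-visited set; a nil/empty filter means "render everything".
func renderPages(pages []page, visited map[string]bool) {
	hasFilter := len(visited) > 0
	for _, p := range pages {
		if hasFilter && !visited[p.relPermalink] {
			continue // nobody is looking at this page right now
		}
		fmt.Println("rendering", p.relPermalink)
	}
}

func main() {
	pages := []page{{"/"}, {"/about/"}, {"/post/hello/"}}

	// The home page is always added to the visited set before a rebuild.
	visited := map[string]bool{"/": true, "/about/": true}

	renderPages(pages, visited) // renders "/" and "/about/" only
}
```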

In most cases this should be enough, but if you navigate to something completely different you may see stale content. Making an edit will then refresh that page.

Note that this feature is enabled by default. To turn it off, run `hugo server --disableFastRender`.
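
A rough sketch of how the dev server collects those URLs, simplified from the `commands/server.go` hunk below. The suffix check matches the patch; the plain-slice queue and the `main` wiring are illustrative stand-ins for the concurrency-safe, de-duplicating `types.EvictingStringQueue` and Hugo's real server setup:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

// visitedURLs holds the last n browsed URLs. The real patch uses
// types.NewEvictingStringQueue(10), which also de-duplicates and is
// safe for concurrent use; a plain slice keeps the sketch short.
var visitedURLs []string

func track(p string) {
	visitedURLs = append(visitedURLs, p)
	if len(visitedURLs) > 10 {
		visitedURLs = visitedURLs[1:] // evict the oldest entry
	}
}

// decorate records HTML page requests so the next rebuild knows which
// pages to re-render; asset requests (CSS, JS, images) are ignored.
func decorate(h http.Handler, fastRenderMode bool) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if fastRenderMode {
			p := r.URL.Path
			if strings.HasSuffix(p, "/") || strings.HasSuffix(p, "html") || strings.HasSuffix(p, "htm") {
				track(p)
			}
		}
		h.ServeHTTP(w, r)
	})
}

func main() {
	fs := http.FileServer(http.Dir("public"))
	http.Handle("/", decorate(fs, true))
	fmt.Println("serving on :1313")
	http.ListenAndServe(":1313", nil)
}
```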

Fixes #3962
See #1643

--- a/commands/commandeer.go
+++ b/commands/commandeer.go
@@ -14,6 +14,7 @@
 package commands
 
 import (
+	"github.com/gohugoio/hugo/common/types"
 	"github.com/gohugoio/hugo/deps"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/hugofs"
@@ -21,8 +22,9 @@
 
 type commandeer struct {
 	*deps.DepsCfg
-	pathSpec   *helpers.PathSpec
-	configured bool
+	pathSpec    *helpers.PathSpec
+	visitedURLs *types.EvictingStringQueue
+	configured  bool
 }
 
 func (c *commandeer) Set(key string, value interface{}) {
@@ -58,5 +60,6 @@
 	if err != nil {
 		return nil, err
 	}
-	return &commandeer{DepsCfg: cfg, pathSpec: ps}, nil
+
+	return &commandeer{DepsCfg: cfg, pathSpec: ps, visitedURLs: types.NewEvictingStringQueue(10)}, nil
 }
--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -768,7 +768,12 @@
 	if err := c.initSites(); err != nil {
 		return err
 	}
-	return Hugo.Build(hugolib.BuildCfg{PrintStats: !quiet, Watching: true}, events...)
+	visited := c.visitedURLs.PeekAllSet()
+	if !c.Cfg.GetBool("disableFastRender") {
+		// Make sure we always render the home page
+		visited["/"] = true
+	}
+	return Hugo.Build(hugolib.BuildCfg{PrintStats: !quiet, Watching: true, RecentlyVisited: visited}, events...)
 }
 
 // newWatcher creates a new watcher to watch filesystem events.
@@ -986,6 +991,16 @@
 				}
 
 				if len(dynamicEvents) > 0 {
+					doLiveReload := !buildWatch && !c.Cfg.GetBool("disableLiveReload")
+					onePageName := pickOneWriteOrCreatePath(dynamicEvents)
+
+					if onePageName != "" && doLiveReload && !c.Cfg.GetBool("disableFastRender") {
+						p := Hugo.GetContentPage(onePageName)
+						if p != nil {
+							c.visitedURLs.Add(p.RelPermalink())
+						}
+
+					}
 					c.Logger.FEEDBACK.Println("\nChange detected, rebuilding site")
 					const layout = "2006-01-02 15:04 -0700"
 					c.Logger.FEEDBACK.Println(time.Now().Format(layout))
@@ -994,21 +1009,15 @@
 						c.Logger.ERROR.Println("Failed to rebuild site:", err)
 					}
 
-					if !buildWatch && !c.Cfg.GetBool("disableLiveReload") {
-
+					if doLiveReload {
 						navigate := c.Cfg.GetBool("navigateToChanged")
-
+						// We have fetched the same page above, but it may have
+						// changed.
 						var p *hugolib.Page
 
 						if navigate {
-
-							// It is probably more confusing than useful
-							// to navigate to a new URL on RENAME etc.
-							// so for now we use the WRITE and CREATE events only.
-							name := pickOneWriteOrCreatePath(dynamicEvents)
-
-							if name != "" {
-								p = Hugo.GetContentPage(name)
+							if onePageName != "" {
+								p = Hugo.GetContentPage(onePageName)
 							}
 						}
 
--- a/commands/server.go
+++ b/commands/server.go
@@ -41,6 +41,8 @@
 	liveReloadPort    int
 	serverWatch       bool
 	noHTTPCache       bool
+
+	disableFastRender bool
 )
 
 var serverCmd = &cobra.Command{
@@ -94,6 +96,8 @@
 	serverCmd.Flags().BoolVar(&disableLiveReload, "disableLiveReload", false, "watch without enabling live browser reload on rebuild")
 	serverCmd.Flags().BoolVar(&navigateToChanged, "navigateToChanged", false, "navigate to changed content file on live browser reload")
 	serverCmd.Flags().BoolVar(&renderToDisk, "renderToDisk", false, "render to Destination path (default is render to memory & serve from there)")
+	serverCmd.Flags().BoolVar(&disableFastRender, "disableFastRender", false, "enables full re-renders on changes")
+
 	serverCmd.Flags().String("memstats", "", "log memory usage to this file")
 	serverCmd.Flags().String("meminterval", "100ms", "interval to poll memory usage (requires --memstats), valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\", \"m\", \"h\".")
 
@@ -120,6 +124,10 @@
 		c.Set("navigateToChanged", navigateToChanged)
 	}
 
+	if cmd.Flags().Changed("disableFastRender") {
+		c.Set("disableFastRender", disableFastRender)
+	}
+
 	if serverWatch {
 		c.Set("watch", true)
 	}
@@ -214,11 +222,26 @@
 
 	httpFs := afero.NewHttpFs(c.Fs.Destination)
 	fs := filesOnlyFs{httpFs.Dir(c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")))}
+
+	doLiveReload := !buildWatch && !c.Cfg.GetBool("disableLiveReload")
+	fastRenderMode := doLiveReload && !c.Cfg.GetBool("disableFastRender")
+
+	if fastRenderMode {
+		jww.FEEDBACK.Println("Running in Fast Render Mode. For full rebuilds on change: hugo server --disableFastRender")
+	}
+
 	decorate := func(h http.Handler) http.Handler {
 		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 			if noHTTPCache {
 				w.Header().Set("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0")
 				w.Header().Set("Pragma", "no-cache")
+			}
+
+			if fastRenderMode {
+				p := r.URL.Path
+				if strings.HasSuffix(p, "/") || strings.HasSuffix(p, "html") || strings.HasSuffix(p, "htm") {
+					c.visitedURLs.Add(p)
+				}
 			}
 			h.ServeHTTP(w, r)
 		})
--- /dev/null
+++ b/common/types/evictingqueue.go
@@ -1,0 +1,89 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package types contains types shared between packages in Hugo.
+package types
+
+import (
+	"sync"
+)
+
+// EvictingStringQueue is a queue which automatically evicts elements from the head of
+// the queue when attempting to add new elements onto the queue and it is full.
+// This queue orders elements LIFO (last-in-first-out). It throws away duplicates.
+// Note: This queue currently does not contain any remove (poll etc.) methods.
+type EvictingStringQueue struct {
+	size int
+	vals []string
+	set  map[string]bool
+	mu   sync.Mutex
+}
+
+// NewEvictingStringQueue creates a new queue with the given size.
+func NewEvictingStringQueue(size int) *EvictingStringQueue {
+	return &EvictingStringQueue{size: size, set: make(map[string]bool)}
+}
+
+// Add adds a new string to the tail of the queue if it's not already there.
+func (q *EvictingStringQueue) Add(v string) {
+	q.mu.Lock()
+	if q.set[v] {
+		q.mu.Unlock()
+		return
+	}
+
+	if len(q.set) == q.size {
+		// Full
+		delete(q.set, q.vals[0])
+		q.vals = append(q.vals[:0], q.vals[1:]...)
+	}
+	q.set[v] = true
+	q.vals = append(q.vals, v)
+	q.mu.Unlock()
+}
+
+// Peek looks at the last element added to the queue.
+func (q *EvictingStringQueue) Peek() string {
+	q.mu.Lock()
+	l := len(q.vals)
+	if l == 0 {
+		q.mu.Unlock()
+		return ""
+	}
+	elem := q.vals[l-1]
+	q.mu.Unlock()
+	return elem
+}
+
+// PeekAll looks at all the elements in the queue, with the newest first.
+func (q *EvictingStringQueue) PeekAll() []string {
+	q.mu.Lock()
+	vals := make([]string, len(q.vals))
+	copy(vals, q.vals)
+	q.mu.Unlock()
+	for i, j := 0, len(vals)-1; i < j; i, j = i+1, j-1 {
+		vals[i], vals[j] = vals[j], vals[i]
+	}
+	return vals
+}
+
+// PeekAllSet returns PeekAll as a set.
+func (q *EvictingStringQueue) PeekAllSet() map[string]bool {
+	all := q.PeekAll()
+	set := make(map[string]bool)
+	for _, v := range all {
+		set[v] = true
+	}
+
+	return set
+}
--- /dev/null
+++ b/common/types/evictingqueue_test.go
@@ -1,0 +1,71 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+import (
+	"sync"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestEvictingStringQueue(t *testing.T) {
+	assert := require.New(t)
+
+	queue := NewEvictingStringQueue(3)
+
+	assert.Equal("", queue.Peek())
+	queue.Add("a")
+	queue.Add("b")
+	queue.Add("a")
+	assert.Equal("b", queue.Peek())
+	queue.Add("b")
+	assert.Equal("b", queue.Peek())
+
+	queue.Add("a")
+	queue.Add("b")
+
+	assert.Equal([]string{"b", "a"}, queue.PeekAll())
+	assert.Equal("b", queue.Peek())
+	queue.Add("c")
+	queue.Add("d")
+	// Overflowed, a should now be removed.
+	assert.Equal([]string{"d", "c", "b"}, queue.PeekAll())
+	assert.Len(queue.PeekAllSet(), 3)
+	assert.True(queue.PeekAllSet()["c"])
+}
+
+func TestEvictingStringQueueConcurrent(t *testing.T) {
+	var wg sync.WaitGroup
+	val := "someval"
+
+	queue := NewEvictingStringQueue(3)
+
+	for j := 0; j < 100; j++ {
+		wg.Add(1)
+		go func() {
+			defer wg.Done()
+			queue.Add(val)
+			v := queue.Peek()
+			if v != val {
+				t.Error("wrong val")
+			}
+			vals := queue.PeekAll()
+			if len(vals) != 1 || vals[0] != val {
+				t.Error("wrong val")
+			}
+		}()
+	}
+	wg.Wait()
+}
--- a/hugolib/config.go
+++ b/hugolib/config.go
@@ -152,6 +152,7 @@
 	v.SetDefault("ignoreFiles", make([]string, 0))
 	v.SetDefault("disableAliases", false)
 	v.SetDefault("debug", false)
+	v.SetDefault("disableFastRender", false)
 
 	return nil
 }
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -280,6 +280,8 @@
 	SkipRender bool
 	// Use this to indicate what changed (for rebuilds).
 	whatChanged *whatChanged
+	// Recently visited URLs. This is used for partial re-rendering.
+	RecentlyVisited map[string]bool
 }
 
 func (h *HugoSites) renderCrossSitesArtifacts() error {
--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -230,7 +230,7 @@
 			s.preparePagesForRender(config)
 
 			if !config.SkipRender {
-				if err := s.render(i); err != nil {
+				if err := s.render(config, i); err != nil {
 					return err
 				}
 			}
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -893,7 +893,7 @@
 	s.Info.LastChange = siteLastChange
 }
 
-func (s *Site) render(outFormatIdx int) (err error) {
+func (s *Site) render(config *BuildCfg, outFormatIdx int) (err error) {
 
 	if outFormatIdx == 0 {
 		if err = s.preparePages(); err != nil {
@@ -917,7 +917,7 @@
 
 	}
 
-	if err = s.renderPages(); err != nil {
+	if err = s.renderPages(config.RecentlyVisited); err != nil {
 		return
 	}
 
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -27,7 +27,7 @@
 
 // renderPages renders pages each corresponding to a markdown file.
 // TODO(bep np doc
-func (s *Site) renderPages() error {
+func (s *Site) renderPages(filter map[string]bool) error {
 
 	results := make(chan error)
 	pages := make(chan *Page)
@@ -44,7 +44,12 @@
 		go pageRenderer(s, pages, results, wg)
 	}
 
+	hasFilter := filter != nil && len(filter) > 0
+
 	for _, page := range s.Pages {
+		if hasFilter && !filter[page.RelPermalink()] {
+			continue
+		}
 		pages <- page
 	}