shithub: hugo

Download patch

ref: b4bcc591e45e9526b7eb9fe77f355da5ead8e627
parent: 6e27239485ffd83b33b743fef53efffd63f0826c
author: spf13 <[email protected]>
date: Tue Aug 13 15:39:24 EDT 2013

Now supports nested paths. Better section detection.

--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -19,18 +19,23 @@
 )
 
 type Node struct {
-	Url         string
-	Permalink   template.HTML
 	RSSlink     template.HTML
 	Site        SiteInfo
 	layout      string
 	Data        map[string]interface{}
-	Section     string
-	Slug        string
 	Title       string
 	Description string
 	Keywords    []string
 	Date        time.Time
+	UrlPath
+}
+
+type UrlPath struct {
+	Url       string
+	Permalink template.HTML
+	Slug      string
+	Section   string
+	Path      string
 }
 
 func (n *Node) GetSection() string {
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -81,27 +81,75 @@
 func (p Pages) Sort()             { sort.Sort(p) }
 func (p Pages) Limit(n int) Pages { return p[0:n] }
 
-func initializePage(filename string) (page Page) {
-	page = Page{contentType: "",
+// TODO abstract further to support loading from more
+// than just files on disk. Should load reader (file, []byte)
+func NewPage(filename string) *Page {
+	page := Page{contentType: "",
 		File:   File{FileName: filename, Extension: "html"},
 		Node:   Node{Keywords: make([]string, 10, 30)},
 		Params: make(map[string]interface{}),
 		Markup: "md"}
 	page.Date, _ = time.Parse("20060102", "20080101")
-	page.setSection()
+	return &page
+}
 
-	return page
+func (page *Page) Initalize() error {
+	err := page.setUrlPath()
+	if err != nil {
+		return err
+	}
+	err = page.buildPageFromFile()
+	if err != nil {
+		return err
+	}
+	page.analyzePage()
+	return nil
 }
 
-func (p *Page) setSection() {
-	x := strings.Split(p.FileName, string(os.PathSeparator))
+func (p *Page) setUrlPath() error {
+	y := strings.TrimPrefix(p.FileName, p.Site.Config.GetAbsPath(p.Site.Config.ContentDir))
+	x := strings.Split(y, string(os.PathSeparator))
+
 	if len(x) <= 1 {
+		return errors.New("Zero length page name")
+	}
+
+	p.Section = strings.Trim(x[1], "/\\")
+	p.Path = strings.Trim(strings.Join(x[:len(x)-1], string(os.PathSeparator)), "/\\")
+	return nil
+}
+
+// If Url is provided it is assumed to be the complete relative path
+// and will override everything
+// Otherwise path + slug is used if provided
+// Lastly path + filename is used if provided
+func (p *Page) setOutFile() {
+	// Always use Url if it's specified
+	if len(strings.TrimSpace(p.Url)) > 2 {
+		p.OutFile = strings.TrimSpace(p.Url)
 		return
 	}
 
-	if section := x[len(x)-2]; section != "content" {
-		p.Section = section
+	var outfile string
+	if len(strings.TrimSpace(p.Slug)) > 0 {
+		// Use Slug if provided
+		if p.Site.Config.UglyUrls {
+			outfile = p.Slug + "." + p.Extension
+		} else {
+			outfile = p.Slug + slash + "index." + p.Extension
+		}
+	} else {
+		// Fall back to filename
+		_, t := filepath.Split(p.FileName)
+		if p.Site.Config.UglyUrls {
+			outfile = replaceExtension(strings.TrimSpace(t), p.Extension)
+		} else {
+			file, _ := fileExt(strings.TrimSpace(t))
+			outfile = file + slash + "index." + p.Extension
+		}
 	}
+
+	p.OutFile = p.Path + string(os.PathSeparator) + strings.TrimSpace(outfile)
 }
 
 func (page *Page) Type() string {
@@ -108,7 +156,7 @@
 	if page.contentType != "" {
 		return page.contentType
 	}
-	page.setSection()
+	page.setUrlPath()
 	if x := page.GetSection(); x != "" {
 		return x
 	}
@@ -136,7 +184,7 @@
 		return nil, errors.New("Zero length page name")
 	}
 
-	p := initializePage(name)
+	p := NewPage(name)
 
 	if err = p.parse(buf); err != nil {
 		return
@@ -144,22 +192,7 @@
 
 	p.analyzePage()
 
-	return &p, nil
-}
-
-// TODO should return errors as well
-// TODO new page should return just a page
-// TODO initalize separately... load from reader (file, or []byte)
-func NewPage(filename string) *Page {
-	p := initializePage(filename)
-
-	if err := p.buildPageFromFile(); err != nil {
-		fmt.Println(err)
-	}
-
-	p.analyzePage()
-
-	return &p
+	return p, nil
 }
 
 func (p *Page) analyzePage() {
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -263,7 +263,8 @@
 		page := NewPage(fileName)
 		page.Site = s.Info
 		page.Tmpl = s.Tmpl
-		s.setOutFile(page)
+		page.Initalize()
+		page.setOutFile()
 		if s.Config.BuildDrafts || !page.Draft {
 			s.Pages = append(s.Pages, page)
 		}
@@ -361,30 +362,7 @@
 
 func (s *Site) WritePages() {
 	for _, p := range s.Pages {
-		s.WritePublic(p.Section+slash+p.OutFile, p.RenderedContent.Bytes())
-	}
-}
-
-func (s *Site) setOutFile(p *Page) {
-	if len(strings.TrimSpace(p.Slug)) > 0 {
-		// Use Slug if provided
-		if s.Config.UglyUrls {
-			p.OutFile = strings.TrimSpace(p.Slug + "." + p.Extension)
-		} else {
-			p.OutFile = strings.TrimSpace(p.Slug + slash + "index.html")
-		}
-	} else if len(strings.TrimSpace(p.Url)) > 2 {
-		// Use Url if provided & Slug missing
-		p.OutFile = strings.TrimSpace(p.Url)
-	} else {
-		// Fall back to filename
-		_, t := filepath.Split(p.FileName)
-		if s.Config.UglyUrls {
-			p.OutFile = replaceExtension(strings.TrimSpace(t), p.Extension)
-		} else {
-			file, _ := fileExt(strings.TrimSpace(t))
-			p.OutFile = file + slash + "index." + p.Extension
-		}
+		s.WritePublic(p.OutFile, p.RenderedContent.Bytes())
 	}
 }