package main

import (
	"bytes"
	"fmt"
	"io"
	"io/fs"
	"log"
	"os"
	"os/exec"
	pathpkg "path"
	"sort"
	"strings"
	"time"

	"gopkg.in/yaml.v3"
)

// Page represents a page.
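//
// A sketch of the YAML frontmatter these tags map onto (the keys follow the
// struct tags below; the values are only illustrative):
//
//	title: "Hello, world"
//	date: 2021-05-09
//	weight: 1
//	outputs: [html]
//	params:
//	  author: example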
type Page struct {
	Title    string                 `yaml:"title"`
	Date     time.Time              `yaml:"date"`
	Weight   int                    `yaml:"weight"`
	Outputs  []string               `yaml:"outputs"`
	Params   map[string]interface{} `yaml:"params"`
	Path     string                 `yaml:"-"`
	FilePath string                 `yaml:"-"`
	URL      string                 `yaml:"-"`
	Content  string                 `yaml:"-"`
	Prev     *Page                  `yaml:"-"`
	Next     *Page                  `yaml:"-"`
	Pages    []*Page                `yaml:"-"`
	Dirs     []*Page                `yaml:"-"`

	feeds map[string][]byte
	index bool
}

// read reads from a directory and indexes the files and directories within it.
func (p *Page) read(srcDir string, task *Task, cfg *Site) error {
	return p._read(os.DirFS(srcDir), ".", task, cfg)
}

func (p *Page) _read(fsys fs.FS, path string, task *Task, cfg *Site) error {
	entries, err := fs.ReadDir(fsys, path)
	if err != nil {
		return err
	}
	for _, entry := range entries {
		name := entry.Name()
		path := pathpkg.Join(path, name)
		if entry.IsDir() {
			// Ignore directories beginning with "_"
			if strings.HasPrefix(name, "_") {
				continue
			}
			// Gather directory data
			dirPath := "/" + path + "/"
			dir := &Page{
				Path:     dirPath,
				FilePath: path,
				URL:      task.URL + dirPath,
			}
			if err := dir._read(fsys, path, task, cfg); err != nil {
				return err
			}
			p.Dirs = append(p.Dirs, dir)
		} else if ext := pathpkg.Ext(name); task.Match(ext) {
			// Ignore pages beginning with "_" with the exception of _index pages
			namePrefix := strings.TrimSuffix(name, ext)
			if strings.HasPrefix(name, "_") && namePrefix != "_index" {
				continue
			}

			content, err := fs.ReadFile(fsys, path)
			if err != nil {
				return err
			}

			page := &Page{
				FilePath: path,
			}
			if namePrefix == "_index" {
				p.index = true
				page = p
			}

			// Try to parse the date from the page filename
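			// For example, a file named "2021-05-09-hello.md" (the extension is
			// whatever the task matches) gets Date 2021-05-09, and "hello.md"
			// replaces the dated filename in the page's path.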
			const layout = "2006-01-02"
			base := pathpkg.Base(path)
			if len(base) >= len(layout) {
				dateStr := base[:len(layout)]
				if time, err := time.Parse(layout, dateStr); err == nil {
					page.Date = time
					// Remove the date from the path
					base = base[len(layout):]
					if len(base) > 0 {
						// Remove a leading dash
						if base[0] == '-' {
							base = base[1:]
						}
						if len(base) > 0 {
							dir := pathpkg.Dir(path)
							if dir == "." {
								dir = ""
							}
							path = pathpkg.Join(dir, base)
						}
					}
				}
			}

			// Extract frontmatter from content
			frontmatter, content := extractFrontmatter(content)
			if len(frontmatter) != 0 {
				if err := yaml.Unmarshal(frontmatter, page); err != nil {
					log.Printf("failed to parse frontmatter for %q: %v", path, err)
				} else if page.Outputs != nil {
					// Enforce page outputs
					if len(page.Outputs) == 0 || task.Name == "" {
						continue
					}
					found := false
					for _, output := range page.Outputs {
						if output == task.Name {
							found = true
							break
						}
					}
					if !found {
						continue
					}
				}

				// Trim leading newlines from content
				content = bytes.TrimLeft(content, "\r\n")
			}

			if cmd, ok := task.preprocess[strings.TrimPrefix(ext, ".")]; ok {
				var buf bytes.Buffer
				if err := execute(cmd, bytes.NewReader(content), &buf); err != nil {
					return err
				}
				content = buf.Bytes()
			}
			page.Content = string(content)

			if !page.index {
				if namePrefix == "index" {
					path = "/" + strings.TrimSuffix(path, name)
				} else {
					path = "/" + strings.TrimSuffix(path, ext)
					if task.UglyURLs {
						path += task.OutputExt
					} else {
						path += "/"
					}
				}
				page.Path = path
				if permalink, ok := cfg.permalinks[p.Path]; ok {
					var b strings.Builder
					permalink.Execute(&b, page)
					page.Path = b.String()
				}
				page.URL = task.URL + page.Path
				p.Pages = append(p.Pages, page)
			}
		}
	}
	return nil
}

// process processes the directory's contents.
func (p *Page) process(cfg *Site, task *Task) error {
	// Build feeds before templates are applied to the page contents
	for _, feed := range task.feeds[p.FilePath] {
		b, err := p.buildFeed(cfg, feed)
		if err != nil {
			return err
		}
		p.addFeed(feed.Output, b)
	}

	if task.TemplateExt != "" {
		// Create index
		if p.index {
			tmpl, ok := cfg.templates.FindTemplate(p.FilePath, "index"+task.TemplateExt)
			if !ok {
				// Try the base template
				tmpl, ok = cfg.templates.FindTemplate(p.FilePath, "base"+task.TemplateExt)
			}
			if ok {
				var b strings.Builder
				if err := tmpl.Execute(&b, p); err != nil {
					return err
				}
				p.Content = b.String()
			}
		}

		// Process pages
		for i := range p.Pages {
			var b strings.Builder
			tmpl, ok := cfg.templates.FindTemplate(p.FilePath, "page"+task.TemplateExt)
			if !ok {
				// Try the base template
				tmpl, ok = cfg.templates.FindTemplate(p.FilePath, "base"+task.TemplateExt)
			}
			if ok {
				if err := tmpl.Execute(&b, p.Pages[i]); err != nil {
					return err
				}
				p.Pages[i].Content = b.String()
			}
		}
	}

	// Process subdirectories
	for _, d := range p.Dirs {
		if err := d.process(cfg, task); err != nil {
			return err
		}
	}
	return nil
}

// buildFeed builds the feed for the directory.
func (p *Page) buildFeed(cfg *Site, feed Feed) ([]byte, error) {
	// Feed is the data passed to the feed template.
	type Feed struct {
		Title string
		Path  string
		URL   string
		Pages []*Page
	}

	tmpl, ok := cfg.templates.FindTemplate(p.FilePath, feed.Template)
	if !ok {
		return nil, fmt.Errorf("failed to generate feed %q: missing feed template %q", feed.Title, feed.Template)
	}

	var b bytes.Buffer
	data := Feed{
		Title: feed.Title,
		Path:  p.Path,
		URL:   p.URL,
		Pages: p.Pages,
	}
	if err := tmpl.Execute(&b, data); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

// addFeed records generated feed content under its output path.
func (p *Page) addFeed(name string, content []byte) {
	if p.feeds == nil {
		p.feeds = map[string][]byte{}
	}
	p.feeds[name] = content
}

// write writes the directory's contents to the provided destination path.
func (p *Page) write(dstDir string, task *Task) error {
	dirPath := pathpkg.Join(dstDir, p.Path)

	// Write pages
	for _, page := range p.Pages {
		dstPath := pathpkg.Join(dstDir, page.Path)
		if !task.UglyURLs {
			dstPath = pathpkg.Join(dstPath, "index"+task.OutputExt)
		}
		if err := page.writeTo(dstPath, task); err != nil {
			return err
		}
	}

	// Write index page
	if p.index {
		dstPath := pathpkg.Join(dstDir, p.Path, "index"+task.OutputExt)
		if err := p.writeTo(dstPath, task); err != nil {
			return err
		}
	}

	// Write feeds
	for name, content := range p.feeds {
		dstPath := pathpkg.Join(dstDir, name)
		os.MkdirAll(dirPath, 0755)
		if err := os.WriteFile(dstPath, content, 0644); err != nil {
			return err
		}
	}

	// Write subdirectories
	for _, dir := range p.Dirs {
		if err := dir.write(dstDir, task); err != nil {
			return err
		}
	}
	return nil
}

// writeTo writes the page content to dstPath, applying the task's postprocess
// command if one is configured.
func (p *Page) writeTo(dstPath string, task *Task) error {
	var content []byte
	if cmd := task.postprocess; cmd != nil {
		var buf bytes.Buffer
		if err := execute(cmd, strings.NewReader(p.Content), &buf); err != nil {
			return err
		}
		content = buf.Bytes()
	} else {
		content = []byte(p.Content)
	}

	dir := pathpkg.Dir(dstPath)
	os.MkdirAll(dir, 0755)
	if err := os.WriteFile(dstPath, content, 0644); err != nil {
		return err
	}
	return nil
}

// sort sorts the directory's pages by weight, then date, then filepath.
func (p *Page) sort() {
	sortPages(p.Pages)
	sortPages(p.Dirs)

	// Sort subdirectories
	for _, d := range p.Dirs {
		d.sort()
	}
}

func sortPages(pages []*Page) {
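	// The three passes below run in order of increasing precedence: later,
	// stable passes win, so pages end up ordered by weight, then date (newest
	// first), then filepath.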
	sort.Slice(pages, func(i, j int) bool {
		pi, pj := pages[i], pages[j]
		return pi.FilePath < pj.FilePath
	})

	sort.SliceStable(pages, func(i, j int) bool {
		pi, pj := pages[i], pages[j]
		return pi.Date.After(pj.Date)
	})

	sort.SliceStable(pages, func(i, j int) bool {
		pi, pj := pages[i], pages[j]
		return pi.Weight < pj.Weight
	})

	// Link each page to its neighbors
	for i := range pages {
		if i-1 >= 0 {
			pages[i].Prev = pages[i-1]
		}
		if i+1 < len(pages) {
			pages[i].Next = pages[i+1]
		}
	}
}

// execute runs a command.
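// For example (the command is only illustrative; any executable on $PATH that
// reads stdin and writes stdout will do):
//
//	var buf bytes.Buffer
//	err := execute([]string{"tr", "a-z", "A-Z"}, strings.NewReader("hi"), &buf)
//	// On success buf contains "HI"; err reports any failure to run the command.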
func execute(command []string, input io.Reader, output io.Writer) error {
	if len(command) == 0 {
		return nil
	}
	cmd := exec.Command(command[0], command[1:]...)
	cmd.Stdin = input
	cmd.Stderr = os.Stderr
	cmd.Stdout = output
	return cmd.Run()
}

// getPage returns the page or directory matching path, searching this
// directory and its subdirectories recursively.
func (p *Page) getPage(path string) *Page {
	// XXX: This is inefficient
	if p.Path == path {
		return p
	}
	for _, page := range p.Pages {
		if page.FilePath == path {
			return page
		}
	}
	for _, dir := range p.Dirs {
		if dir.Path == path {
			return dir
		}
	}
	for _, dir := range p.Dirs {
		if page := dir.getPage(path); page != nil {
			return page
		}
	}
	return nil
}