package main

import (
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"

	yaml "gopkg.in/yaml.v2"
)

// Site is a Jekyll site.
type Site struct {
	Config SiteConfig
	Source string
	Dest   string
	Data   map[interface{}]interface{}
	Paths  map[string]*Page // URL path -> *Page
}

// For now (and maybe always?), there's just one site.
var site Site

// SiteConfig is the Jekyll site configuration, typically read from _config.yml.
// See https://jekyllrb.com/docs/configuration/#default-configuration
type SiteConfig struct {
	// Where things are:
	SourceDir      string `yaml:"source"`
	DestinationDir string `yaml:"destination"`
	Collections    map[string]interface{}

	// Handling Reading
	Include     []string
	Exclude     []string
	MarkdownExt string `yaml:"markdown_ext"`

	// Outputting
	Permalink string
}

const siteConfigDefaults = `
# Where things are
source: .
destination: ./_site
data_dir: _data
includes_dir: _includes
collections:
  posts:
    output: true

# Handling Reading
include: [".htaccess"]
exclude: ["Gemfile", "Gemfile.lock", "node_modules", "vendor/bundle/", "vendor/cache/", "vendor/gems/", "vendor/ruby/"]
keep_files: [".git", ".svn"]
encoding: "utf-8"
markdown_ext: "markdown,mkdown,mkdn,mkd,md"
strict_front_matter: false

# Outputting
permalink: date
paginate_path: /page:num
timezone: null
`

// TODO permalink: "/:categories/:year/:month/:day/:title.html",
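
// As an illustration only (not part of this file's defaults), a user's
// _config.yml needs to set only the keys it overrides; everything else comes
// from siteConfigDefaults above. A hypothetical example:
//
//	destination: ./public
//	exclude: ["README.md", "Makefile"]
//	markdown_ext: "md"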

// For unit tests
func init() {
	site.Initialize()
}

// Initialize sets the site configuration and data to the Jekyll defaults,
// and resets the route map.
func (s *Site) Initialize() {
	y := []byte(siteConfigDefaults)
	if err := yaml.Unmarshal(y, &s.Config); err != nil {
		panic(err)
	}
	if err := yaml.Unmarshal(y, &s.Data); err != nil {
		panic(err)
	}
	s.Paths = make(map[string]*Page)
}

// ReadConfig reads the configuration file at path, if it exists, into the
// site configuration and data. A missing configuration file is not an error;
// the defaults from Initialize are used instead.
func (s *Site) ReadConfig(path string) error {
	s.Initialize()
	switch configBytes, err := ioutil.ReadFile(path); {
	case err != nil && os.IsNotExist(err):
		return nil
	case err != nil:
		return err
	default:
		if err := yaml.Unmarshal(configBytes, &s.Config); err != nil {
			return err
		}
		return yaml.Unmarshal(configBytes, &s.Data)
	}
}
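
// A sketch of how ReadConfig might be called (the actual call site lives
// elsewhere in this package; the path and error handling below are
// illustrative assumptions, not taken from this file):
//
//	if err := site.ReadConfig(filepath.Join(site.Config.SourceDir, "_config.yml")); err != nil {
//		log.Fatal(err)
//	}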

// KeepFile returns whether the destination file at p should be kept when the
// site is rebuilt; cf. the keep_files configuration above.
func (s *Site) KeepFile(p string) bool {
	// TODO implement keep_files
	return false
}

// MarkdownExtensions returns the set of markdown file extensions.
func (s *Site) MarkdownExtensions() map[string]bool {
	extns := strings.Split(s.Config.MarkdownExt, ",")
	return stringArrayToMap(extns)
}
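
// stringArrayToMap is a helper defined elsewhere in this package. A minimal
// sketch of its assumed behavior (turning a slice into a set):
//
//	func stringArrayToMap(a []string) map[string]bool {
//		m := make(map[string]bool, len(a))
//		for _, s := range a {
//			m[s] = true
//		}
//		return m
//	}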

// GetFileURL returns the URL path given a file path, relative to the site source directory.
func (s *Site) GetFileURL(path string) (string, bool) {
	for _, v := range s.Paths {
		if v.Path == path {
			return v.Permalink, true
		}
	}
	return "", false
}

// Exclude returns true iff a site excludes a file.
func (s *Site) Exclude(path string) bool {
	// TODO exclude based on glob, not exact match
	exclusionMap := stringArrayToMap(s.Config.Exclude)
	base := filepath.Base(path)
	switch {
	case path == ".":
		return false
	case exclusionMap[path]:
		return true
	// TODO check Include
	case strings.HasPrefix(base, "."), strings.HasPrefix(base, "_"):
		return true
	default:
		return false
	}
}
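
// For example, with the default configuration above:
//
//	s.Exclude("_posts")       // true: leading underscore
//	s.Exclude(".git")         // true: leading dot
//	s.Exclude("node_modules") // true: listed in exclude
//	s.Exclude("index.md")     // false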

// ReadFiles scans the source directory and creates pages and collections.
func (s *Site) ReadFiles() error {
	d := map[interface{}]interface{}{}

	walkFn := func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		rel, err := filepath.Rel(s.Config.SourceDir, path)
		if err != nil {
			return err
		}
		switch {
		case info.IsDir() && s.Exclude(rel):
			return filepath.SkipDir
		case info.IsDir(), s.Exclude(rel):
			return nil
		}
		p, err := ReadPage(rel, d)
		if err != nil {
			return err
		}
		if p.Published {
			s.Paths[p.Permalink] = p
		}
		return nil
	}

	if err := filepath.Walk(s.Config.SourceDir, walkFn); err != nil {
		return err
	}
	return s.ReadCollections()
}

// ReadCollections scans the file system for collections. It adds each collection's
// pages to the site map, and creates a template site variable for each collection.
func (s *Site) ReadCollections() error {
	for name, d := range s.Config.Collections {
		data, ok := d.(map[interface{}]interface{})
		if !ok {
			panic("expected collection value to be a map")
		}
		c := makeCollection(s, name, data)
		if c.Output { // TODO always read the pages; just don't build them / include them in routes
			if err := c.ReadPages(); err != nil {
				return err
			}
		}
		s.Data[c.Name] = c.PageData()
	}
	return nil
}
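
// To make this concrete: with the default configuration, s.Config.Collections
// holds a single entry,
//
//	"posts": map[interface{}]interface{}{"output": true}
//
// so the loop above reads the posts collection and exposes its pages to
// templates as s.Data["posts"] (assuming makeCollection sets c.Name to the
// map key; makeCollection, ReadPages, and PageData are defined elsewhere in
// this package).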