Fix BufferSettings append & add ParseFrontMatter
Until now, BufferSettings values were always appended with a leading \n, which meant the first value was already written as "\nVALUE". Not anymore. Also add an option to parse just the front matter. It is still not efficient, as we tokenize the whole org file, but I don't think saving a few milliseconds would be worth making the code uglier.
parent 0255a129e2
commit a0e87057d6
3 changed files with 110 additions and 4 deletions
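The BufferSettings change itself is in one of the other changed files and is not shown below. As a rough sketch of the behavior the message describes (the helper is hypothetical; only the BufferSettings map is taken from the code shown here), repeated keyword values are joined like this:

// Hypothetical helper illustrating the fix: repeated keywords are still
// joined with "\n", but the first value is stored as-is instead of "\nVALUE".
func (d *Document) setBufferSetting(k, v string) {
	if old, ok := d.BufferSettings[k]; ok {
		d.BufferSettings[k] = old + "\n" + v
	} else {
		d.BufferSettings[k] = v
	}
}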
@@ -5,6 +5,7 @@ import (
	"fmt"
	"io"
	"log"
	"strings"
)

type Document struct {
@@ -51,6 +52,15 @@ var lexFns = []lexFn{

var nilToken = token{"nil", -1, "", nil}

var DefaultFrontMatterHandler = func(k, v string) interface{} {
	switch k {
	case "TAGS":
		return strings.Fields(v)
	default:
		return v
	}
}

func NewDocument() *Document {
	return &Document{
		Footnotes: &Footnotes{
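For illustration, a small sketch (not part of the diff) of what the default handler returns for made-up keyword values:

func defaultHandlerExample() {
	// TAGS values are split on whitespace; everything else is passed through.
	tags := DefaultFrontMatterHandler("TAGS", "go org-mode blog") // []string{"go", "org-mode", "blog"}
	title := DefaultFrontMatterHandler("TITLE", "My Post")        // "My Post"
	fmt.Println(tags, title)
}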
@@ -78,6 +88,29 @@ func (d *Document) Write(w Writer) Writer {
}

func (d *Document) Parse(input io.Reader) *Document {
	d.tokenize(input)
	_, nodes := d.parseMany(0, func(d *Document, i int) bool { return !(i < len(d.tokens)) })
	d.Nodes = nodes
	return d
}

func (d *Document) FrontMatter(input io.Reader, f func(string, string) interface{}) map[string]interface{} {
	d.tokenize(input)
	d.parseMany(0, func(d *Document, i int) bool {
		if !(i < len(d.tokens)) {
			return true
		}
		t := d.tokens[i]
		return t.kind != "keyword" && !(t.kind == "text" && t.content == "")
	})
	frontMatter := make(map[string]interface{}, len(d.BufferSettings))
	for k, v := range d.BufferSettings {
		frontMatter[k] = f(k, v)
	}
	return frontMatter
}

func (d *Document) tokenize(input io.Reader) {
	d.tokens = []token{}
	scanner := bufio.NewScanner(input)
	for scanner.Scan() {
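A hypothetical usage sketch for the new FrontMatter method, written as if it lived in the same package as Document (the input text and expected values are made up):

func frontMatterExample() map[string]interface{} {
	input := strings.NewReader("#+TITLE: My Post\n#+TAGS: go org\n\n* A headline\n")
	// The whole reader is tokenized, but parsing stops at the first token that
	// is neither a keyword nor an empty text line, so only the leading
	// #+KEY: VALUE block ends up in BufferSettings.
	return NewDocument().FrontMatter(input, DefaultFrontMatterHandler)
	// roughly: map[string]interface{}{"TITLE": "My Post", "TAGS": []string{"go", "org"}}
}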
@@ -86,9 +119,6 @@ func (d *Document) Parse(input io.Reader) *Document {
	if err := scanner.Err(); err != nil {
		panic(err)
	}
	_, nodes := d.parseMany(0, func(d *Document, i int) bool { return !(i < len(d.tokens)) })
	d.Nodes = nodes
	return d
}

func (d *Document) Get(key string) string {