feat: Generalising Directory Parsing
- (temp) Pages must declare `type: page` in their frontmatter
- Added backlink validation and parsing
- Fixed and added the GenerateLinkStore method
- TODO: Fix the AddFileAndRender tests
- TODO: Zettel implementation

Co-authored-by: Anirudh Sudhir <[email protected]>
bwaklog and anirudhsudhir committed Apr 11, 2024
1 parent fe8df9c commit d01aa8b
Showing 22 changed files with 321 additions and 234 deletions.
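
The first bullet of the commit message introduces a `type` key in the YAML frontmatter that decides how a file is treated (page, post, or note). The snippet below is a minimal standalone sketch of that gate, not code from this commit; the use of gopkg.in/yaml.v3 and any struct tag other than `type` are assumptions made for illustration.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Minimal stand-in for the parser's Frontmatter struct; only the fields
// needed for the demo are included, and the yaml tags are assumed.
type frontmatter struct {
	Title string `yaml:"title"`
	Type  string `yaml:"type"` // "page", "post" or "note"
}

func main() {
	// Frontmatter of a hypothetical content/about.md
	raw := []byte("title: About\ntype: page\n")

	var fm frontmatter
	if err := yaml.Unmarshal(raw, &fm); err != nil {
		panic(err)
	}

	// Loosely mirrors the combined ParseMDDir/AddFile behaviour in this
	// commit: pages and posts become templates, notes go into the notes
	// map, and anything else is effectively ignored.
	switch fm.Type {
	case "page", "post", "note":
		fmt.Printf("accepted %q as a %s\n", fm.Title, fm.Type)
	default:
		fmt.Printf("ignoring %q: unrecognised type %q\n", fm.Title, fm.Type)
	}
}
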
20 changes: 15 additions & 5 deletions cmd/anna/anna.go
@@ -1,6 +1,7 @@
package anna

import (
"fmt"
"html/template"
"log"
"os"
@@ -20,8 +21,8 @@ type Cmd struct {
func (cmd *Cmd) VanillaRender() {
// Defining Engine and Parser Structures
p := parser.Parser{
Templates: make(map[template.URL]parser.TemplateData),
TagsMap: make(map[template.URL][]parser.TemplateData),
Templates: make(map[template.URL]parser.TemplateData, 10),
TagsMap: make(map[template.URL][]parser.TemplateData, 10),
ErrorLogger: log.New(os.Stderr, "ERROR\t", log.Ldate|log.Ltime|log.Lshortfile),
RenderDrafts: cmd.RenderDrafts,
LiveReload: cmd.LiveReload,
@@ -30,8 +31,10 @@ func (cmd *Cmd) VanillaRender() {
e := engine.Engine{
ErrorLogger: log.New(os.Stderr, "ERROR\t", log.Ldate|log.Ltime|log.Lshortfile),
}
e.DeepDataMerge.Templates = make(map[template.URL]parser.TemplateData)
e.DeepDataMerge.TagsMap = make(map[template.URL][]parser.TemplateData)
e.DeepDataMerge.Templates = make(map[template.URL]parser.TemplateData, 10)
e.DeepDataMerge.TagsMap = make(map[template.URL][]parser.TemplateData, 10)
e.DeepDataMerge.Notes = make(map[template.URL]parser.Note, 10)
e.DeepDataMerge.LinkStore = make(map[template.URL][]*parser.Note, 10)

helper := helpers.Helper{
ErrorLogger: e.ErrorLogger,
@@ -45,7 +48,9 @@ func (cmd *Cmd) VanillaRender() {
p.ParseConfig(helpers.SiteDataPath + "layout/config.yml")

fileSystem := os.DirFS(helpers.SiteDataPath + "content/")
p.Notes = make(map[template.URL]parser.Note, 10)
p.ParseMDDir(helpers.SiteDataPath+"content/", fileSystem)
p.BackLinkParser()

p.ParseRobots(helpers.SiteDataPath+"layout/robots.txt", helpers.SiteDataPath+"rendered/robots.txt")
p.ParseLayoutFiles()
@@ -77,5 +82,10 @@ func (cmd *Cmd) VanillaRender() {
e.RenderUserDefinedPages(helpers.SiteDataPath, templ)

e.RenderTags(helpers.SiteDataPath, templ)
cmd.VanillaNoteRender(p.LayoutConfig)

// Zettel engine functionality
e.DeepDataMerge.Notes = p.Notes

e.GenerateLinkStore()
fmt.Println(e.DeepDataMerge.LinkStore)
}
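
p.BackLinkParser() is called above, but its body is not part of the hunks shown on this page. The following standalone sketch is one rough guess at what the "backlink validation and parsing" from the commit message could look like. It assumes notes reference each other with [[wikilink]]-style markers in their bodies; that syntax, the URL scheme, and every name other than LinkedNoteURLs are assumptions, not something this diff confirms.

package main

import (
	"fmt"
	"regexp"
)

// Local stand-ins for parser.Note and p.Notes; fields are trimmed down
// to what the demo needs.
type note struct {
	Body           string
	LinkedNoteURLs []string
}

var wikiLink = regexp.MustCompile(`\[\[([^\]]+)\]\]`)

// backLinkPass is a hypothetical equivalent of BackLinkParser: it validates
// every [[target]] reference and appends resolvable ones to LinkedNoteURLs.
func backLinkPass(notes map[string]note) {
	for url, n := range notes {
		for _, m := range wikiLink.FindAllStringSubmatch(n.Body, -1) {
			target := "notes/" + m[1] + ".html" // assumed URL scheme
			if _, ok := notes[target]; !ok {
				fmt.Printf("%s links to missing note %q\n", url, m[1])
				continue
			}
			n.LinkedNoteURLs = append(n.LinkedNoteURLs, target)
		}
		notes[url] = n // write the modified copy back into the map
	}
}

func main() {
	notes := map[string]note{
		"notes/a.html": {Body: "see [[b]] and [[missing]]"},
		"notes/b.html": {Body: "links back to [[a]]"},
	}
	backLinkPass(notes)
	for url, n := range notes {
		fmt.Println(url, "->", n.LinkedNoteURLs)
	}
}
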
11 changes: 5 additions & 6 deletions cmd/anna/notes.go → cmd/anna/notes.go.temp
@@ -5,24 +5,23 @@ import (
"log"
"os"

"github.com/acmpesuecc/anna/pkg/engine"
"github.com/acmpesuecc/anna/pkg/helpers"
"github.com/acmpesuecc/anna/pkg/parser"
zettel_engine "github.com/acmpesuecc/anna/pkg/zettel/engine"
zettel_parser "github.com/acmpesuecc/anna/pkg/zettel/parser"
)

func (cmd *Cmd) VanillaNoteRender(LayoutConfig parser.LayoutConfig) {
p := zettel_parser.Parser{
p := parser.Parser{
ErrorLogger: log.New(os.Stderr, "ERROR\t", log.Ldate|log.Ltime|log.Lshortfile),
}
p.NotesMergedData.Notes = make(map[template.URL]zettel_parser.Note)
p.NotesMergedData.LinkStore = make(map[template.URL][]*zettel_parser.Note)
p.Notes = make(map[template.URL]parser.Note)
e.LinkStore = make(map[template.URL][]*parser.Note)

fileSystem := os.DirFS(helpers.SiteDataPath + "content/notes")
p.Layout = LayoutConfig
p.ParseNotesDir(helpers.SiteDataPath+"content/notes/", fileSystem)

e := zettel_engine.Engine{
e := engine.Engine{
ErrorLogger: log.New(os.Stderr, "ERROR\t", log.Ldate|log.Ltime|log.Lshortfile),
}
e.NotesMergedData.Notes = make(map[template.URL]zettel_parser.Note)
6 changes: 6 additions & 0 deletions pkg/engine/engine.go
@@ -28,6 +28,12 @@ type DeepDataMerge struct {
// Posts contains the template data of files in the posts directory
Posts []parser.TemplateData

//Stores all the notes
Notes map[template.URL]parser.Note

//Stores the links of each note to other notes
LinkStore map[template.URL][]*parser.Note

// Stores the index generated for search functionality
JSONIndex map[template.URL]JSONIndexTemplate
}
12 changes: 1 addition & 11 deletions pkg/engine/user_engine.go
@@ -5,7 +5,6 @@ import (
"html/template"
"os"
"runtime"
"strings"
"sync"

"github.com/acmpesuecc/anna/pkg/parser"
@@ -28,14 +27,6 @@ func (e *Engine) RenderEngineGeneratedFiles(fileOutPath string, template *templa
DeepDataMerge: e.DeepDataMerge,
PageURL: "posts.html",
}
// e.DeepDataMerge.Templates["posts.html"] = parser.TemplateData{
// Frontmatter: parser.Frontmatter{Title: "Posts"},
// }

// pageData := PageData{
// DeepDataMerge: e.DeepDataMerge,
// PageURL: "posts.html",
// }

err := template.ExecuteTemplate(&postsBuffer, "posts", postsData)
if err != nil {
@@ -67,8 +58,7 @@ func (e *Engine) RenderUserDefinedPages(fileOutPath string, templ *template.Temp
semaphore := make(chan struct{}, concurrency)

for _, templateURL := range templateURLs {
fileInPath := strings.TrimSuffix(templateURL, ".html")
if fileInPath == ".html" {
if templateURL == ".html" {
continue
}

94 changes: 94 additions & 0 deletions pkg/engine/zettel_engine.go
@@ -0,0 +1,94 @@
package engine

import (
"html/template"
"runtime"
"sync"
)

func (e *Engine) RenderNotes(fileOutPath string, templ *template.Template) {
// templ.Funcs(funcMap template.FuncMap)

numCPU := runtime.NumCPU()
numTemplates := len(e.DeepDataMerge.Notes)
concurrency := numCPU * 2 // Adjust the concurrency factor based on system hardware resources

if numTemplates < concurrency {
concurrency = numTemplates
}

templateURLs := make([]string, 0, numTemplates)
for templateURL := range e.DeepDataMerge.Notes {
templateURLs = append(templateURLs, string(templateURL))
}

var wg sync.WaitGroup
semaphore := make(chan struct{}, concurrency)

for _, url := range templateURLs {
if url == ".html" {
continue
}

wg.Add(1)
semaphore <- struct{}{}

go func(templateURL string) {
defer func() {
<-semaphore
wg.Done()
}()

e.RenderPage(fileOutPath, template.URL(url), templ, "note")
}(url)
}

wg.Wait()
}

func (e *Engine) GenerateLinkStore() {
for url, note := range e.DeepDataMerge.Notes {
for _, linkURL := range note.LinkedNoteURLs {
linkNote, ok := e.DeepDataMerge.Notes[linkURL]
if ok {
e.DeepDataMerge.LinkStore[url] = append(e.DeepDataMerge.LinkStore[url], &linkNote)
}
}
}
}

// func (z *Zettel) RetrieveNotePointer(noteTitle string) *zettel_parser.Note {
// for _, Note := range e.NotesMergedData.Notes {
// if Note.Frontmatter.Title == noteTitle {
// return &Note
// }
// }
// return nil
// }

// func (e *Engine) GenerateRootNote(fileOutPath string, templ *template.Template) {
// // This is the page that acts as the root of all the
// // notes part of the site

// // Creating a map of all head notes

// var buffer bytes.Buffer

// fmt.Println(e.NotesMergedData.LinkStore)

// /*
// t := template.Must(templ.Funcs(template.FuncMap{
// "Deref": func(i *zettel_parser.Note) zettel_parser.Note { return *note },
// }).Parse(src))
// */

// err := templ.ExecuteTemplate(&buffer, "root", e.NotesMergedData.LinkStore)
// if err != nil {
// e.ErrorLogger.Fatal(err)
// }

// err = os.WriteFile(fileOutPath+"rendered/notes.html", buffer.Bytes(), 0666)
// if err != nil {
// e.ErrorLogger.Fatal(err)
// }
// }
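
Below is a small, hand-wired usage sketch of the GenerateLinkStore method added above. It relies only on exported names visible in this diff (engine.Engine, DeepDataMerge.Notes, DeepDataMerge.LinkStore, parser.Note with CompleteURL and LinkedNoteURLs) and is an illustration rather than a test from the repository.

package main

import (
	"fmt"
	"html/template"

	"github.com/acmpesuecc/anna/pkg/engine"
	"github.com/acmpesuecc/anna/pkg/parser"
)

func main() {
	e := engine.Engine{}
	e.DeepDataMerge.Notes = make(map[template.URL]parser.Note)
	e.DeepDataMerge.LinkStore = make(map[template.URL][]*parser.Note)

	// Note "a" links to note "b"; LinkedNoteURLs would normally be filled
	// in by the backlink parsing pass.
	e.DeepDataMerge.Notes["notes/a.html"] = parser.Note{
		CompleteURL:    "notes/a.html",
		LinkedNoteURLs: []template.URL{"notes/b.html"},
	}
	e.DeepDataMerge.Notes["notes/b.html"] = parser.Note{CompleteURL: "notes/b.html"}

	e.GenerateLinkStore()

	// Expected output: notes/a.html links to notes/b.html
	for url, linked := range e.DeepDataMerge.LinkStore {
		for _, n := range linked {
			fmt.Println(url, "links to", n.CompleteURL)
		}
	}
}
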
86 changes: 63 additions & 23 deletions pkg/parser/parser.go
@@ -36,6 +36,10 @@ type Frontmatter struct {
PreviewImage string `yaml:"previewimage"`
Tags []string `yaml:"tags"`
Authors []string `yaml:"authors"`

// Head is specifically used for
// mentioning the head of the notes
Head bool `yaml:"head"`
}

// This struct holds all of the data required to render any page of the site
@@ -63,6 +67,9 @@ type Parser struct {
// Posts contains the template data of files in the posts directory
Posts []TemplateData

//Stores all the notes
Notes map[template.URL]Note

// TODO: Look into the two below fields into a single one
MdFilesName []string
MdFilesPath []string
@@ -81,13 +88,19 @@

func (p *Parser) ParseMDDir(baseDirPath string, baseDirFS fs.FS) {
fs.WalkDir(baseDirFS, ".", func(path string, dir fs.DirEntry, err error) error {
if path != "." && !strings.Contains(path, "notes") {
if path != "." && path != ".obsidian" {
if dir.IsDir() {
subDir := os.DirFS(path)
p.ParseMDDir(path, subDir)
} else {
if filepath.Ext(path) == ".md" {
fileName := filepath.Base(path)
// OLD IMPL
// fileName := filepath.Base(path)
//
// NEW IMPL
// /contents/notes/2134321.md ==> notes/2134321.md
fileName := strings.TrimPrefix(path, baseDirPath)
// fmt.Println(fileNameWithPath, fileName)

content, err := os.ReadFile(baseDirPath + path)
if err != nil {
Expand All @@ -96,19 +109,25 @@ func (p *Parser) ParseMDDir(baseDirPath string, baseDirFS fs.FS) {

fronmatter, body, parseSuccess := p.ParseMarkdownContent(string(content))
if parseSuccess {
if (fronmatter.Draft && p.RenderDrafts) || !fronmatter.Draft {
p.AddFileAndRender(baseDirPath, fileName, fronmatter, body)
if fronmatter.Type == "post" {
if (fronmatter.Draft && p.RenderDrafts) || !fronmatter.Draft {
p.AddFile(baseDirPath, fileName, fronmatter, body)
}
} else {
p.AddFile(baseDirPath, fileName, fronmatter, body)
}
}

}
}
}
return nil
})
}

func (p *Parser) AddFileAndRender(baseDirPath string, dirEntryPath string, frontmatter Frontmatter, body string) {
func (p *Parser) AddFile(baseDirPath string, dirEntryPath string, frontmatter Frontmatter, body string) {
p.MdFilesName = append(p.MdFilesName, dirEntryPath)
// fmt.Println(baseDirPath, dirEntryPath)
filepath := baseDirPath + dirEntryPath
p.MdFilesPath = append(p.MdFilesPath, filepath)

@@ -122,30 +141,51 @@ func (p *Parser) AddFileAndRender(baseDirPath string, dirEntryPath string, front
key, _ := strings.CutPrefix(filepath, helpers.SiteDataPath+"content/")
url, _ := strings.CutSuffix(key, ".md")
url += ".html"
if frontmatter.Type == "post" {
url = "posts/" + url
}

page := TemplateData{
CompleteURL: template.URL(url),
Date: date,
Frontmatter: frontmatter,
Body: template.HTML(body),
LiveReload: p.LiveReload,
}
if frontmatter.Type == "post" || frontmatter.Type == "page" {

page := TemplateData{
CompleteURL: template.URL(url),
Date: date,
Frontmatter: frontmatter,
Body: template.HTML(body),
LiveReload: p.LiveReload,
}

// Adding the page to the merged map storing all site pages
if frontmatter.Type == "post" {
// url = "posts/" + url
p.Posts = append(p.Posts, page)
}

p.Templates[template.URL(url)] = page

// Adding the page to the tags map with the corresponding tags
for _, tag := range page.Frontmatter.Tags {
tagsMapKey := "tags/" + tag + ".html"
p.TagsMap[template.URL(tagsMapKey)] = append(p.TagsMap[template.URL(tagsMapKey)], page)

}

// Adding the page to the merged map storing all site pages
if frontmatter.Type == "post" {
p.Posts = append(p.Posts, page)
}

p.Templates[template.URL(url)] = page
if frontmatter.Type == "note" {
// url = "notes/" + url

// Adding the page to the tags map with the corresponding tags
for _, tag := range page.Frontmatter.Tags {
tagsMapKey := "tags/" + tag + ".html"
p.TagsMap[template.URL(tagsMapKey)] = append(p.TagsMap[template.URL(tagsMapKey)], page)
note := Note{
CompleteURL: template.URL(url),
Date: date,
Frontmatter: frontmatter,
Body: template.HTML(body),
LinkedNoteURLs: []template.URL{},
}

p.Notes[note.CompleteURL] = note

// NOTE: not adding the template URLs of referenced notes here;
// rather, LinkedNoteURLs is populated later while parsing links
}

}

func (p *Parser) ParseMarkdownContent(filecontent string) (Frontmatter, string, bool) {
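
The URL derivation added to AddFile strips the content/ prefix from the file path, swaps the .md extension for .html, and prefixes posts/ for post-type files. The standalone sketch below replays just that mapping, with "site/" standing in for helpers.SiteDataPath (an assumed value for the demo); it needs Go 1.20+ for strings.CutPrefix and strings.CutSuffix.

package main

import (
	"fmt"
	"strings"
)

// toURL replays the key/url derivation from AddFile on an isolated path.
func toURL(siteDataPath, filePath, fmType string) string {
	key, _ := strings.CutPrefix(filePath, siteDataPath+"content/")
	url, _ := strings.CutSuffix(key, ".md")
	url += ".html"
	if fmType == "post" {
		url = "posts/" + url
	}
	return url
}

func main() {
	fmt.Println(toURL("site/", "site/content/about.md", "page"))         // about.html
	fmt.Println(toURL("site/", "site/content/notes/2134321.md", "note")) // notes/2134321.html
	fmt.Println(toURL("site/", "site/content/hello.md", "post"))         // posts/hello.html
}
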
2 changes: 1 addition & 1 deletion pkg/parser/parser_test.go
@@ -65,7 +65,7 @@ func TestAddFileandRender(t *testing.T) {
want_parser.Posts = append(want_parser.Posts, want_page)
}

got_parser.AddFileAndRender("", filename, sample_frontmatter, sample_body)
got_parser.AddFile("", filename, sample_frontmatter, sample_body)

if !reflect.DeepEqual(got_parser, want_parser) {
t.Errorf("want %v; \ngot %v", want_parser, got_parser)