Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

(merge) Implement deep data merge #98

Merged
merged 8 commits into from
Apr 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
site/rendered/
anna
!anna/
*.exe
Expand All @@ -7,3 +6,8 @@ anna
ssg/
*.txt
dist/

# Test directories
**/rendered/
test/**/static/
test/**/got_sitemap.xml
25 changes: 14 additions & 11 deletions cmd/anna/anna.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,21 +14,24 @@ import (
type Cmd struct {
RenderDrafts bool
Addr string
LiveReload bool
}

func (cmd *Cmd) VanillaRender() {
// Defining Engine and Parser Structures
p := parser.Parser{
Templates: make(map[template.URL]parser.TemplateData),
TagsMap: make(map[string][]parser.TemplateData),
TagsMap: make(map[template.URL][]parser.TemplateData),
ErrorLogger: log.New(os.Stderr, "ERROR\t", log.Ldate|log.Ltime|log.Lshortfile),
RenderDrafts: cmd.RenderDrafts,
LiveReload: cmd.LiveReload,
}

e := engine.Engine{
Templates: make(map[template.URL]parser.TemplateData),
TagsMap: make(map[string][]parser.TemplateData),
ErrorLogger: log.New(os.Stderr, "ERROR\t", log.Ldate|log.Ltime|log.Lshortfile),
}
e.DeepDataMerge.Templates = make(map[template.URL]parser.TemplateData)
e.DeepDataMerge.TagsMap = make(map[template.URL][]parser.TemplateData)

helper := helpers.Helper{
ErrorLogger: e.ErrorLogger,
Expand All @@ -47,26 +50,26 @@ func (cmd *Cmd) VanillaRender() {
p.ParseRobots(helpers.SiteDataPath+"layout/robots.txt", helpers.SiteDataPath+"rendered/robots.txt")
p.ParseLayoutFiles()

e.Templates = p.Templates
e.TagsMap = p.TagsMap
e.LayoutConfig = p.LayoutConfig
e.Posts = p.Posts
e.DeepDataMerge.Templates = p.Templates
e.DeepDataMerge.TagsMap = p.TagsMap
e.DeepDataMerge.LayoutConfig = p.LayoutConfig
e.DeepDataMerge.Posts = p.Posts

e.GenerateSitemap(helpers.SiteDataPath + "rendered/sitemap.xml")
e.GenerateFeed()
e.GenerateJSONIndex(helpers.SiteDataPath)
helper.CopyDirectoryContents(helpers.SiteDataPath+"static/", helpers.SiteDataPath+"rendered/static/")

sort.Slice(e.Posts, func(i, j int) bool {
return e.Posts[i].Frontmatter.Date > e.Posts[j].Frontmatter.Date
sort.Slice(e.DeepDataMerge.Posts, func(i, j int) bool {
return e.DeepDataMerge.Posts[i].Frontmatter.Date > e.DeepDataMerge.Posts[j].Frontmatter.Date
})

templ, err := template.ParseGlob(helpers.SiteDataPath + "layout/*.layout")
templ, err := template.ParseGlob(helpers.SiteDataPath + "layout/*.html")
if err != nil {
e.ErrorLogger.Fatalf("%v", err)
}

templ, err = templ.ParseGlob(helpers.SiteDataPath + "layout/partials/*.layout")
templ, err = templ.ParseGlob(helpers.SiteDataPath + "layout/partials/*.html")
if err != nil {
e.ErrorLogger.Fatalf("%v", err)
}
Expand Down
35 changes: 34 additions & 1 deletion cmd/anna/livereload.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,16 @@ import (
"net/http"
"os"
"path/filepath"
"sync/atomic"
"time"

"github.com/acmpesuecc/anna/pkg/helpers"
)

var reloadPage = make(chan struct{})

var countRequests atomic.Int32

type liveReload struct {
errorLogger *log.Logger
fileTimes map[string]time.Time
Expand Down Expand Up @@ -43,6 +48,7 @@ func (cmd *Cmd) StartLiveReload() {
for _, rootDir := range lr.rootDirs {
if lr.traverseDirectory(rootDir) {
cmd.VanillaRender()
reloadPage <- struct{}{}
}
}
if !lr.serverRunning {
Expand Down Expand Up @@ -97,8 +103,35 @@ func (lr *liveReload) checkFile(path string, modTime time.Time) bool {

// startServer serves the rendered site over HTTP on the given port and
// exposes the /events SSE endpoint used by the live-reload client script.
// Blocks until the server exits; a listen failure is fatal.
func (lr *liveReload) startServer(addr string) {
	fmt.Print("Serving content at: http://localhost:", addr, "\n")
	// The diff view left the pre-PR ListenAndServe(FileServer) call in
	// place alongside the new mux registrations, declaring err twice;
	// the merged result registers handlers on the default mux and
	// listens once with nil (DefaultServeMux).
	http.Handle("/", http.FileServer(http.Dir(helpers.SiteDataPath+"./rendered")))
	http.HandleFunc("/events", eventsHandler)
	err := http.ListenAndServe(":"+addr, nil)
	if err != nil {
		lr.errorLogger.Fatal(err)
	}
}

// eventsHandler implements a Server-Sent Events endpoint for live reload.
// When the site is re-rendered, StartLiveReload signals reloadPage and the
// single waiting subscriber emits one (empty) SSE event, prompting the
// browser to refresh. Extra concurrent subscribers return immediately so
// they do not pile up on the unbuffered channel.
func eventsHandler(w http.ResponseWriter, r *http.Request) {
	countRequests.Add(1)
	// Atomic decrement on every exit path. The original
	// countRequests.Store(countRequests.Load()-1) is a non-atomic
	// read-modify-write that can lose updates under concurrent requests,
	// and the decrement was skipped entirely if Write/Flush panicked.
	defer countRequests.Add(-1)

	// Set CORS headers to allow all origins.
	w.Header().Set("Access-Control-Allow-Origin", "*")
	w.Header().Set("Access-Control-Expose-Headers", "Content-Type")

	// Standard SSE response headers.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")

	// Only the first concurrent subscriber blocks for the reload signal.
	if countRequests.Load() != 1 {
		return
	}
	<-reloadPage

	event := "event:\ndata:\n\n"
	if _, err := w.Write([]byte(event)); err != nil {
		return
	}
	// Guard the type assertion: not every http.ResponseWriter implements
	// http.Flusher, and a bare w.(http.Flusher) would panic.
	if flusher, ok := w.(http.Flusher); ok {
		flusher.Flush()
	}
}
1 change: 1 addition & 0 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ func main() {
}

if serve {
annaCmd.LiveReload = true
annaCmd.StartLiveReload()
}

Expand Down
103 changes: 63 additions & 40 deletions pkg/engine/anna_engine.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,28 +16,48 @@ import (
"github.com/acmpesuecc/anna/pkg/parser"
)

// TagRootTemplateData is the payload handed to the "all-tags" template when
// rendering the page that lists every tag on the site.
type TagRootTemplateData struct {
	// DeepDataMerge carries the engine's merged site data so the
	// template can reach global state (templates, tags, layout config).
	DeepDataMerge DeepDataMerge
	// PageURL is the URL of the page being rendered (e.g. "tags.html").
	PageURL template.URL
	// TemplateData holds page-level data such as the page title.
	TemplateData parser.TemplateData
	// TagNames lists tag display names with the "tags/" prefix and
	// ".html" suffix stripped from the TagsMap keys.
	TagNames []string
}

func (e *Engine) RenderTags(fileOutPath string, templ *template.Template) {
var tagsBuffer bytes.Buffer

// Extracting tag titles
tags := make([]string, 0, len(e.TagsMap))
for tag := range e.TagsMap {
tags := make([]template.URL, 0, len(e.DeepDataMerge.TagsMap))
for tag := range e.DeepDataMerge.TagsMap {
tags = append(tags, tag)
}

slices.SortFunc(tags, func(a, b string) int {
return cmp.Compare(strings.ToLower(a), strings.ToLower(b))
slices.SortFunc(tags, func(a, b template.URL) int {
return cmp.Compare(strings.ToLower(string(a)), strings.ToLower(string(b)))
})

tagNames := parser.TemplateData{
FilenameWithoutExtension: "Tags",
Layout: e.LayoutConfig,
Frontmatter: parser.Frontmatter{Title: "Tags"},
Tags: tags,
tagNames := make([]string, 0, len(tags))
for _, tag := range tags {
tagString := string(tag)
tagString, _ = strings.CutPrefix(tagString, "tags/")
tagString, _ = strings.CutSuffix(tagString, ".html")

tagNames = append(tagNames, tagString)
}

tagRootTemplataData := parser.TemplateData{
Frontmatter: parser.Frontmatter{Title: "Tags"},
}

tagTemplateData := TagRootTemplateData{
DeepDataMerge: e.DeepDataMerge,
PageURL: "tags.html",
TemplateData: tagRootTemplataData,
TagNames: tagNames,
}

// Rendering the page displaying all tags
err := templ.ExecuteTemplate(&tagsBuffer, "all-tags", tagNames)
err := templ.ExecuteTemplate(&tagsBuffer, "all-tags", tagTemplateData)
if err != nil {
e.ErrorLogger.Fatal(err)
}
Expand All @@ -51,23 +71,27 @@ func (e *Engine) RenderTags(fileOutPath string, templ *template.Template) {
// Create a wait group to wait for all goroutines to finish
var wg sync.WaitGroup

e.DeepDataMerge.Tags = make(map[template.URL]parser.TemplateData)

for tag := range e.DeepDataMerge.TagsMap {
tagString := string(tag)
tagString, _ = strings.CutPrefix(tagString, "tags/")
tagString, _ = strings.CutSuffix(tagString, ".html")

e.DeepDataMerge.Tags[tag] = parser.TemplateData{
Frontmatter: parser.Frontmatter{
Title: tagString,
},
}
}

// Rendering the subpages with merged tagged posts
for tag, taggedTemplates := range e.TagsMap {
for tag, taggedTemplates := range e.DeepDataMerge.TagsMap {
wg.Add(1)
go func(tag string, taggedTemplates []parser.TemplateData) {
go func(tag template.URL, taggedTemplates []parser.TemplateData) {
defer wg.Done()

pagePath := "tags/" + tag
templateData := parser.TemplateData{
FilenameWithoutExtension: tag,
Layout: e.LayoutConfig,
Frontmatter: parser.Frontmatter{
Title: tag,
},
SpecificTagTemplates: taggedTemplates,
}

e.RenderPage(fileOutPath, template.URL(pagePath), templateData, templ, "tag-subpage")
e.RenderPage(fileOutPath, template.URL(tag), templ, "tag-subpage")
}(tag, taggedTemplates)
}

Expand All @@ -88,16 +112,15 @@ func (e *Engine) GenerateJSONIndex(outFilePath string) {

// Copying contents from e.Templates to new JsonMerged struct
jsonIndexTemplate := make(map[template.URL]JSONIndexTemplate)
for templateURL, templateData := range e.Templates {
for templateURL, templateData := range e.DeepDataMerge.Templates {
jsonIndexTemplate[templateURL] = JSONIndexTemplate{
CompleteURL: templateData.CompleteURL,
FilenameWithoutExtension: templateData.FilenameWithoutExtension,
Frontmatter: templateData.Frontmatter,
Tags: templateData.Frontmatter.Tags,
CompleteURL: templateData.CompleteURL,
Frontmatter: templateData.Frontmatter,
Tags: templateData.Frontmatter.Tags,
}
}

e.JSONIndex = jsonIndexTemplate
e.DeepDataMerge.JSONIndex = jsonIndexTemplate

// Marshal the contents of jsonMergedData
jsonMergedMarshaledData, err := json.Marshal(jsonIndexTemplate)
Expand All @@ -117,22 +140,22 @@ func (e *Engine) GenerateSitemap(outFilePath string) {
buffer.WriteString("<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">\n")

// Sorting templates by key
keys := make([]string, 0, len(e.Templates))
for k := range e.Templates {
keys := make([]string, 0, len(e.DeepDataMerge.Templates))
for k := range e.DeepDataMerge.Templates {
keys = append(keys, string(k))
}
sort.Strings(keys)

tempTemplates := make(map[template.URL]parser.TemplateData)
for _, templateURL := range keys {
tempTemplates[template.URL(templateURL)] = e.Templates[template.URL(templateURL)]
tempTemplates[template.URL(templateURL)] = e.DeepDataMerge.Templates[template.URL(templateURL)]
}

e.Templates = tempTemplates
e.DeepDataMerge.Templates = tempTemplates

// Iterate over parsed markdown files
for _, templateData := range e.Templates {
url := e.LayoutConfig.BaseURL + "/" + templateData.FilenameWithoutExtension + ".html"
for _, templateData := range e.DeepDataMerge.Templates {
url := e.DeepDataMerge.LayoutConfig.BaseURL + "/" + string(templateData.CompleteURL)
buffer.WriteString("\t<url>\n")
buffer.WriteString("\t\t<loc>" + url + "</loc>\n")
buffer.WriteString("\t\t<lastmod>" + templateData.Frontmatter.Date + "</lastmod>\n")
Expand All @@ -156,17 +179,17 @@ func (e *Engine) GenerateFeed() {
buffer.WriteString("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
buffer.WriteString("<?xml-stylesheet href=\"/static/styles/feed.xsl\" type=\"text/xsl\"?>\n")
buffer.WriteString("<feed xmlns=\"http://www.w3.org/2005/Atom\">\n")
buffer.WriteString(" <title>" + e.LayoutConfig.SiteTitle + "</title>\n")
buffer.WriteString(" <link href=\"" + e.LayoutConfig.BaseURL + "/" + "\" rel=\"self\"/>\n")
buffer.WriteString(" <title>" + e.DeepDataMerge.LayoutConfig.SiteTitle + "</title>\n")
buffer.WriteString(" <link href=\"" + e.DeepDataMerge.LayoutConfig.BaseURL + "/" + "\" rel=\"self\"/>\n")
buffer.WriteString(" <updated>" + time.Now().Format(time.RFC3339) + "</updated>\n")

// iterate over parsed markdown files that are non-draft posts
for _, templateData := range e.Templates {
for _, templateData := range e.DeepDataMerge.Templates {
if !templateData.Frontmatter.Draft {
buffer.WriteString("<entry>\n")
buffer.WriteString(" <title>" + templateData.Frontmatter.Title + "</title>\n")
buffer.WriteString(" <link href=\"" + e.LayoutConfig.BaseURL + "/posts/" + templateData.FilenameWithoutExtension + ".html\"/>\n")
buffer.WriteString(" <id>" + e.LayoutConfig.BaseURL + "/posts/" + templateData.FilenameWithoutExtension + ".html</id>\n")
buffer.WriteString(" <link href=\"" + e.DeepDataMerge.LayoutConfig.BaseURL + string(templateData.CompleteURL) + "/>\n")
buffer.WriteString(" <id>" + e.DeepDataMerge.LayoutConfig.BaseURL + string(templateData.CompleteURL) + "</id>\n")
buffer.WriteString(" <updated>" + time.Unix(templateData.Date, 0).Format(time.RFC3339) + "</updated>\n")
buffer.WriteString(" <content type=\"html\"><![CDATA[" + string(templateData.Body) + "]]></content>\n")
buffer.WriteString(" </entry>\n")
Expand Down
Loading
Loading