// internal/utils.go
//
// Package utils provides markdown parsing, post loading, and template
// helpers for the blog generator.
  1  package utils
  2  
import (
	"encoding/json"
	"errors"
	"fmt"
	"html/template"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"time"

	"github.com/gomarkdown/markdown"
	"github.com/gomarkdown/markdown/html"
	"github.com/gomarkdown/markdown/parser"
)
 18  
 19  func CreateSidebarLinks(headers []string) template.HTML {
 20  	var linksHTML string
 21  	for _, header := range headers {
 22  		sanitizedHeader := sanitizeHeaderForID(header)
 23  		link := fmt.Sprintf(`<li><a href="#%s">%s</a></li>`, sanitizedHeader, header)
 24  		linksHTML += link
 25  	}
 26  	return template.HTML(linksHTML)
 27  }
 28  
 29  func ParseExternalMarkdownFile(content []byte) (ExternalPost, error) {
 30  	sections := strings.SplitN(string(content), "---", 3)
 31  	if len(sections) < 3 {
 32  		return ExternalPost{}, errors.New("invalid markdown format")
 33  	}
 34  
 35  	metadata := sections[1]
 36  	mdContent := sections[2]
 37  
 38  	// deal with rogue \r's
 39  	metadata = strings.ReplaceAll(metadata, "\r", "")
 40  	mdContent = strings.ReplaceAll(mdContent, "\r", "")
 41  
 42  	title, source, author, slug, description, published := parseExternalMetadata(metadata)
 43  
 44  	htmlContent := mdToHTML([]byte(mdContent))
 45  	headers := extractHeaders([]byte(mdContent))
 46  
 47  	return ExternalPost{
 48  		Title:       title,
 49  		Source:      source,
 50  		Author:      author,
 51  		Slug:        slug,
 52  		Description: description,
 53  		Content:     template.HTML(htmlContent),
 54  		Headers:     headers,
 55  		Published:   published,
 56  	}, nil
 57  }
 58  
 59  func ParseMarkdownFile(content []byte) (BlogPost, error) {
 60  	sections := strings.SplitN(string(content), "---", 3)
 61  	if len(sections) < 3 {
 62  		return BlogPost{}, errors.New("invalid markdown format")
 63  	}
 64  
 65  	metadata := sections[1]
 66  	mdContent := sections[2]
 67  
 68  	// deal with rogue \r's
 69  	metadata = strings.ReplaceAll(metadata, "\r", "")
 70  	mdContent = strings.ReplaceAll(mdContent, "\r", "")
 71  
 72  	title, slug, parent, description, tags, date, metaDescriptionStr,
 73  		metaPropertyTitleStr, metaPropertyDescriptionStr,
 74  		metaOgURLStr := parseMetadata(metadata)
 75  
 76  	htmlContent := mdToHTML([]byte(mdContent))
 77  	headers := extractHeaders([]byte(mdContent))
 78  
 79  	return BlogPost{
 80  		Title:                   title,
 81  		Slug:                    slug,
 82  		Parent:                  parent,
 83  		Description:             description,
 84  		Tags:                    tags,
 85  		Content:                 template.HTML(htmlContent),
 86  		Headers:                 headers,
 87  		Date:                    date,
 88  		MetaDescription:         metaDescriptionStr,
 89  		MetaPropertyTitle:       metaPropertyTitleStr,
 90  		MetaPropertyDescription: metaPropertyDescriptionStr,
 91  		MetaOgURL:               metaOgURLStr,
 92  	}, nil
 93  }
 94  
 95  func Dict(values ...interface{}) (map[string]interface{}, error) {
 96  	if len(values)%2 != 0 {
 97  		return nil, errors.New("invalid dict call")
 98  	}
 99  	dict := make(map[string]interface{}, len(values)/2)
100  	for i := 0; i < len(values); i += 2 {
101  		key, ok := values[i].(string)
102  		if !ok {
103  			return nil, errors.New("dict keys must be strings")
104  		}
105  		dict[key] = values[i+1]
106  	}
107  	return dict, nil
108  }
109  
110  func BuildData(tag string, postsData Data) Data {
111  	var data Data
112  	var posts []BlogPost
113  	var tags []BlogPost
114  	tag = strings.ReplaceAll(tag, "/", "")
115  
116  	for _, tagPosts := range postsData.TagsPosts {
117  		if _, found := tagPosts.Tag[tag]; found {
118  			posts = tagPosts.Tag[tag]
119  			break
120  		}
121  	}
122  	for _, post := range posts {
123  		// for earch post on markdown/posts directory with rebluild the slug according to year and month
124  		// it should be on a config file later...
125  		pre_slug := strings.Split(post.Date.String(), "-")
126  		post.Slug = pre_slug[0] + "/" + pre_slug[1] + "/" + post.Slug
127  		tags = append(tags, post)
128  	}
129  
130  	cat := &Category{
131  		Name:        "Posts",
132  		Pages:       tags,
133  		NbPosts:     len(posts),
134  		CurrentPage: 1,
135  	}
136  
137  	data.Categories = append(data.Categories, *cat)
138  	return data
139  }
140  
141  func LoadExternalData(dir string) (ExternalData, error) {
142  	var data ExternalData
143  	var jsonpost JsonBlogPost
144  	var jsonposts []JsonBlogPost
145  	categoriesMap := make(map[string]*ExternalCategory)
146  
147  	posts, err := loadExternalMarkdownPosts(dir)
148  	if err != nil {
149  		return data, err
150  	}
151  
152  	for i, post := range posts {
153  		// for earch post on markdown/posts directory with rebluild the slug according to year and month
154  		// it should be on a config file later...
155  		if strings.Contains(dir, "posts") {
156  			pre_slug := strings.Split(post.Published.String(), "-")
157  			post.Slug = pre_slug[0] + "/" + pre_slug[1] + "/" + post.Slug
158  
159  			// genrerate a tiny json posts without content for the search
160  			// unfortunately fusejs can not search in content (too big ?)
161  			// and finally it's not a good idea to generate a big json
162  			post.Id = i
163  			tinypost, _ := json.Marshal(post)
164  			json.Unmarshal([]byte(tinypost), &jsonpost)
165  			jsonposts = append(jsonposts, jsonpost)
166  			jsonpost = JsonBlogPost{}
167  		}
168  
169  		if _, exists := categoriesMap["External"]; !exists {
170  			categoriesMap["External"] = &ExternalCategory{
171  				Name:  "External",
172  				Pages: []ExternalPost{post},
173  			}
174  		} else {
175  			categoriesMap["External"].Pages = append(categoriesMap["External"].Pages, post)
176  		}
177  	}
178  	// convert map to slice
179  	for _, cat := range categoriesMap {
180  		data.Categories = append(data.Categories, *cat)
181  	}
182  
183  	if len(jsonposts) != 0 {
184  		search, err := json.Marshal(jsonposts)
185  		if err != nil {
186  			fmt.Println(err)
187  			return data, err
188  		}
189  
190  		// create json for the search
191  		err = os.WriteFile("static/search.json", search, 0644)
192  		if err != nil {
193  			panic(err)
194  		}
195  	}
196  	return data, nil
197  }
198  
199  func LoadData(dir string) (Data, error) {
200  	var data Data
201  	var jsonpost JsonBlogPost
202  	var jsonposts []JsonBlogPost
203  	categoriesMap := make(map[string]*Category)
204  
205  	posts, err := loadMarkdownPosts(dir)
206  	if err != nil {
207  		return data, err
208  	}
209  
210  	for i, post := range posts {
211  		// for earch post on markdown/posts directory with rebluild the slug according to year and month
212  		// it should be on a config file later...
213  		if strings.Contains(dir, "posts") {
214  			pre_slug := strings.Split(post.Date.String(), "-")
215  			post.Slug = pre_slug[0] + "/" + pre_slug[1] + "/" + post.Slug
216  
217  			// genrerate a tiny json posts without content for the search
218  			// unfortunately fusejs can not search in content (too big ?)
219  			// and finally it's not a good idea to generate a big json
220  			post.Id = i
221  			tinypost, _ := json.Marshal(post)
222  			json.Unmarshal([]byte(tinypost), &jsonpost)
223  			jsonposts = append(jsonposts, jsonpost)
224  			jsonpost = JsonBlogPost{}
225  		}
226  
227  		if post.Parent != "" {
228  			if _, exists := categoriesMap[post.Parent]; !exists {
229  				categoriesMap[post.Parent] = &Category{
230  					Name:        post.Parent,
231  					Pages:       []BlogPost{post},
232  					NbPosts:     1,
233  					CurrentPage: 1,
234  				}
235  			} else {
236  				categoriesMap[post.Parent].Pages = append(categoriesMap[post.Parent].Pages, post)
237  				categoriesMap[post.Parent].NbPosts += 1
238  			}
239  		}
240  	}
241  	// convert map to slice
242  	for _, cat := range categoriesMap {
243  		data.Categories = append(data.Categories, *cat)
244  	}
245  
246  	if len(jsonposts) != 0 {
247  		search, err := json.Marshal(jsonposts)
248  		if err != nil {
249  			fmt.Println(err)
250  			return data, err
251  		}
252  
253  		// create json for the search
254  		err = os.WriteFile("static/search.json", search, 0644)
255  		if err != nil {
256  			panic(err)
257  		}
258  	}
259  
260  	// fmt.Println("search: ", string(search))
261  
262  	// Reach Tags
263  	data.TagsPosts = tagsGenerate(posts)
264  	return data, nil
265  }
266  
267  func tagsGenerate(posts []BlogPost) []TagPosts {
268  	var tgs []TagPosts
269  	x := make(map[string][]BlogPost)
270  
271  	for _, post := range posts {
272  		for _, tag := range post.Tags {
273  			x[tag] = append(x[tag], post)
274  		}
275  		tgs = append(tgs, TagPosts{x})
276  	}
277  	return tgs
278  }
279  
280  func loadExternalMarkdownPosts(dir string) ([]ExternalPost, error) {
281  	var posts []ExternalPost
282  	files, err := os.ReadDir(dir)
283  	if err != nil {
284  		return nil, err
285  	}
286  
287  	for _, file := range files {
288  		if strings.Compare(file.Name(), "index.md") == 0 {
289  			continue
290  		}
291  		if strings.HasSuffix(file.Name(), ".md") {
292  			path := dir + "/" + file.Name()
293  			content, err := os.ReadFile(path)
294  			if err != nil {
295  				return nil, err
296  			}
297  			post, err := ParseExternalMarkdownFile(content)
298  			if err != nil {
299  				return nil, err
300  			}
301  			posts = append(posts, post)
302  		}
303  	}
304  
305  	sort.Slice(posts, func(i, j int) bool {
306  		return posts[i].Published.After(posts[j].Published)
307  	})
308  	return posts, nil
309  }
310  
311  func loadMarkdownPosts(dir string) ([]BlogPost, error) {
312  	var posts []BlogPost
313  	files, err := os.ReadDir(dir)
314  	if err != nil {
315  		return nil, err
316  	}
317  
318  	for _, file := range files {
319  		if strings.Compare(file.Name(), "index.md") == 0 {
320  			continue
321  		}
322  		if strings.HasSuffix(file.Name(), ".md") {
323  			path := dir + "/" + file.Name()
324  			content, err := os.ReadFile(path)
325  			if err != nil {
326  				return nil, err
327  			}
328  			post, err := ParseMarkdownFile(content)
329  			if err != nil {
330  				return nil, err
331  			}
332  			posts = append(posts, post)
333  		}
334  	}
335  
336  	sort.Slice(posts, func(i, j int) bool {
337  		return posts[i].Date.After(posts[j].Date)
338  	})
339  	return posts, nil
340  }
341  
342  func LoadMarkdownPost(file string) (BlogPost, error) {
343  	var post BlogPost
344  	content, err := os.ReadFile(file)
345  	if err != nil {
346  		return post, err
347  	}
348  	post, err = ParseMarkdownFile(content)
349  	if err != nil {
350  		return post, err
351  	}
352  	return post, nil
353  }
354  
355  func mdToHTML(md []byte) []byte {
356  	extensions := parser.CommonExtensions | parser.AutoHeadingIDs | parser.NoEmptyLineBeforeBlock
357  	parser := parser.NewWithExtensions(extensions)
358  
359  	opts := html.RendererOptions{
360  		Flags: html.CommonFlags | html.HrefTargetBlank,
361  	}
362  	renderer := html.NewRenderer(opts)
363  
364  	doc := parser.Parse(md)
365  
366  	output := markdown.Render(doc, renderer)
367  
368  	return output
369  }
370  
// parseExternalMetadata extracts the known front-matter fields from a
// block of "key: value" lines. Values are whitespace-trimmed; keys are
// matched verbatim. The published date must be formatted YYYY-MM-DD; a
// parse failure is logged and leaves published as the zero time.Time.
func parseExternalMetadata(metadata string) (
	title string,
	source string,
	author string,
	slug string,
	description string,
	published time.Time,
) {
	fields := make(map[string]string)
	for _, line := range strings.Split(metadata, "\n") {
		// Split on the first ':' only, so values may contain colons.
		if key, value, ok := strings.Cut(line, ":"); ok {
			fields[key] = strings.TrimSpace(value)
		}
	}

	published, err := time.Parse(time.DateOnly, fields["published"])
	if err != nil {
		fmt.Println(err)
	}

	return fields["title"], fields["source"], fields["author"],
		fields["slug"], fields["description"], published
}
403  
// parseMetadata extracts the blog-post front-matter fields from a block
// of "key: value" lines. Values are whitespace-trimmed; keys are
// matched verbatim. Tags are written as a JSON-ish list, e.g.
// tags: ["go", "web"]; the date must be RFC 3339, and a parse failure
// is logged and leaves date as the zero time.Time.
func parseMetadata(metadata string) (
	title string,
	slug string,
	parent string,
	description string,
	tags []string,
	date time.Time,
	metaDescription string,
	metaPropertyTitle string,
	metaPropertyDescription string,
	metaOgURL string,
) {
	fields := make(map[string]string)
	for _, line := range strings.Split(metadata, "\n") {
		// Split on the first ':' only, so values (e.g. URLs) may
		// contain colons.
		if key, value, ok := strings.Cut(line, ":"); ok {
			fields[key] = strings.TrimSpace(value)
		}
	}

	if tagsStr := fields["tags"]; tagsStr != "" {
		// Strip the list syntax ([ ] ") and split on commas, trimming
		// the whitespace after each comma so ["go", "web"] yields
		// "web" rather than " web".
		cleaned := strings.NewReplacer("[", "", "]", "", `"`, "").Replace(tagsStr)
		for _, tag := range strings.Split(cleaned, ",") {
			tags = append(tags, strings.TrimSpace(tag))
		}
	}

	date, err := time.Parse(time.RFC3339, fields["date"])
	if err != nil {
		fmt.Println(err)
	}

	return fields["title"], fields["slug"], fields["parent"], fields["description"],
		tags, date, fields["metaDescription"], fields["metaPropertyTitle"],
		fields["metaPropertyDescription"], fields["metaOgURL"]
}
452  
// extractHeaders returns the text of every level-2 markdown header
// ("## ...") in content, in document order; nil when there are none.
func extractHeaders(content []byte) []string {
	// (?m) makes ^ match at every line start; exactly two '#' followed
	// by whitespace marks a level-2 header.
	headerRe := regexp.MustCompile(`(?m)^##\s+(.*)`)

	var headers []string
	for _, m := range headerRe.FindAllSubmatch(content, -1) {
		// m[1] is the capture group: the header text after "## ".
		headers = append(headers, string(m[1]))
	}
	return headers
}
466  
// sanitizeHeaderForID converts a header title into the anchor id used
// in the rendered page: lowercased, with spaces and apostrophes turned
// into hyphens, and any character outside the allowed set removed.
func sanitizeHeaderForID(header string) string {
	lowered := strings.ToLower(header)
	hyphenated := strings.NewReplacer(" ", "-", "'", "-", "’", "-").Replace(lowered)

	// Keep ASCII alphanumerics, hyphens, and a fixed set of accented
	// letters; strip everything else.
	allowed := "àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝâêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿÄËÏÖÜŸçÇߨøÅ寿œ-"
	strip := regexp.MustCompile("[^a-z0-9" + allowed + "]")
	return strip.ReplaceAllString(hyphenated, "")
}