// parse opens the named present file and parses it with the given mode.
func parse(name string, mode present.ParseMode) (*present.Doc, error) {
	f, err := os.Open(name)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return present.Parse(f, name, mode)
}
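// A minimal sketch of how a caller might use parse. The function name and the
// idea of building a directory listing are hypothetical, not taken from the
// code above; the sketch only illustrates that passing present.TitlesOnly
// skips section bodies when a full parse is not needed.
func listingTitle(path string) (string, error) {
	doc, err := parse(path, present.TitlesOnly)
	if err != nil {
		return "", err
	}
	return doc.Title, nil
}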
// loadHomeArticle reads and renders the home page article, panicking on any
// error since the content is required at startup.
func loadHomeArticle() []byte {
	const fname = "assets/home.article"
	f, err := os.Open(fname)
	if err != nil {
		panic(err)
	}
	defer f.Close()
	doc, err := present.Parse(f, fname, 0)
	if err != nil {
		panic(err)
	}
	var buf bytes.Buffer
	if err := renderPresentation(&buf, fname, doc); err != nil {
		panic(err)
	}
	return buf.Bytes()
}
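// A hedged sketch of how loadHomeArticle's output might be used: render once
// at startup, keep the bytes, and serve them directly. The variable and
// handler names are hypothetical, and net/http is assumed to be imported.
var homeArticle = loadHomeArticle()

func serveHome(w http.ResponseWriter, r *http.Request) {
	if r.URL.Path != "/" {
		http.NotFound(w, r)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.Write(homeArticle)
}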
// parseLesson parses the lesson at the given path and returns its content,
// rendered with the given template and encoded as JSON.
func parseLesson(tmpl *template.Template, path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	doc, err := present.Parse(prepContent(f), path, 0)
	if err != nil {
		return nil, err
	}
	lesson := Lesson{
		doc.Title,
		doc.Subtitle,
		make([]Page, len(doc.Sections)),
	}
	for i, sec := range doc.Sections {
		p := &lesson.Pages[i]
		w := new(bytes.Buffer)
		if err := sec.Render(w, tmpl); err != nil {
			return nil, fmt.Errorf("render section: %v", err)
		}
		p.Title = sec.Title
		p.Content = w.String()
		codes := findPlayCode(sec)
		p.Files = make([]File, len(codes))
		for i, c := range codes {
			f := &p.Files[i]
			f.Name = c.FileName
			f.Content = string(c.Raw)
			hash := sha1.Sum(c.Raw)
			f.Hash = base64.StdEncoding.EncodeToString(hash[:])
		}
	}
	w := new(bytes.Buffer)
	if err := json.NewEncoder(w).Encode(lesson); err != nil {
		return nil, fmt.Errorf("encode lesson: %v", err)
	}
	return w.Bytes(), nil
}
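// The Lesson, Page, and File types are not shown above. The definitions below
// are an assumption reconstructed from how parseLesson uses them (field order
// matters, since Lesson is built with an unkeyed literal); they are not the
// original declarations.
type Lesson struct {
	Title    string
	Subtitle string
	Pages    []Page
}

type Page struct {
	Title   string
	Content string
	Files   []File
}

type File struct {
	Name    string
	Content string
	Hash    string
}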
// loadDocs reads all content from the provided file system root, renders all
// the articles it finds, adds them to the Server's docs field, computes the
// denormalized docPaths, docTags, and tags fields, and populates the helper
// fields (Newer, Older, Related) of each Doc.
func (s *Server) loadDocs(root string) error {
	// Read content into docs field.
	const ext = ".article"
	fn := func(p string, info os.FileInfo, err error) error {
		if filepath.Ext(p) != ext {
			return nil
		}
		f, err := os.Open(p)
		if err != nil {
			return err
		}
		defer f.Close()
		d, err := present.Parse(f, p, 0)
		if err != nil {
			return err
		}
		html := new(bytes.Buffer)
		err = d.Render(html, s.template.doc)
		if err != nil {
			return err
		}
		p = p[len(root) : len(p)-len(ext)] // trim root and extension
		p = filepath.ToSlash(p)
		// Article time defaults to 11am if none is provided; subtract one hour
		// so the article is already online when the cron job fires at 11am.
		if d.Time.Add(-time.Hour).Before(time.Now()) || appengine.IsDevAppServer() {
			s.docs = append(s.docs, &Doc{
				Doc:       d,
				Path:      s.cfg.BasePath + p,
				Permalink: s.cfg.BaseURL + p,
				HTML:      template.HTML(html.String()),
			})
		}
		return nil
	}
	err := filepath.Walk(root, fn)
	if err != nil {
		return err
	}
	sort.Sort(docsByTime(s.docs))

	// Pull out doc paths and tags and put in reverse-associating maps.
	s.docPaths = make(map[string]*Doc)
	s.docTags = make(map[string][]*Doc)
	for _, d := range s.docs {
		s.docPaths[strings.TrimPrefix(d.Path, s.cfg.BasePath)] = d
		for _, t := range d.Tags {
			s.docTags[t] = append(s.docTags[t], d)
		}
	}

	// Pull out unique sorted list of tags.
	for t := range s.docTags {
		s.tags = append(s.tags, t)
	}
	sort.Strings(s.tags)

	// Set up presentation-related fields: Newer, Older, and Related.
	for _, doc := range s.docs {
		// Newer, Older: docs adjacent to doc.
		for i := range s.docs {
			if s.docs[i] != doc {
				continue
			}
			if i > 0 {
				doc.Newer = s.docs[i-1]
			}
			if i+1 < len(s.docs) {
				doc.Older = s.docs[i+1]
			}
			break
		}

		// Related: all docs that share tags with doc.
		related := make(map[*Doc]bool)
		for _, t := range doc.Tags {
			for _, d := range s.docTags[t] {
				if d != doc {
					related[d] = true
				}
			}
		}
		for d := range related {
			doc.Related = append(doc.Related, d)
		}
		sort.Sort(docsByTime(doc.Related))
	}

	return nil
}
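// A hedged sketch of how the maps built by loadDocs might be consulted. The
// Server fields used here (docPaths, cfg) come from the code above; the
// lookup method itself is hypothetical and not part of the original code.
func (s *Server) lookupDoc(urlPath string) (*Doc, bool) {
	// docPaths is keyed by the document path with the configured base path
	// stripped, so strip it from the request path the same way.
	p := strings.TrimPrefix(urlPath, s.cfg.BasePath)
	d, ok := s.docPaths[p]
	return d, ok
}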