func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error { renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n") err := s.render(name, d, renderBuffer, layouts...) outBuffer := bp.GetBuffer() defer bp.PutBuffer(outBuffer) var path []byte if viper.GetBool("RelativeURLs") { path = []byte(helpers.GetDottedRelativePath(dest)) } else { s := viper.GetString("BaseURL") if !strings.HasSuffix(s, "/") { s += "/" } path = []byte(s) } transformer := transform.NewChain(transform.AbsURLInXML) transformer.Apply(outBuffer, renderBuffer, path) if err == nil { err = s.WriteDestFile(dest, outBuffer) } return err }
func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layouts ...string) error { renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) err := s.render(name, d, renderBuffer, layouts...) outBuffer := bp.GetBuffer() defer bp.PutBuffer(outBuffer) transformLinks := transform.NewEmptyTransforms() if viper.GetBool("CanonifyUrls") { absURL, err := transform.AbsURL(viper.GetString("BaseUrl")) if err != nil { return err } transformLinks = append(transformLinks, absURL...) } if viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") { transformLinks = append(transformLinks, transform.LiveReloadInject) } transformer := transform.NewChain(transformLinks...) transformer.Apply(outBuffer, renderBuffer) if err == nil { if err = s.WriteDestPage(dest, outBuffer); err != nil { return err } } return err }
// renderAndWritePage renders the template identified by name with data d and
// writes the result to dest, first applying URL transforms and (in
// server/watch mode) LiveReload script injection. A zero-length result is
// reported loudly, since it usually means a missing theme or template.
func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layouts ...string) error {
	renderBuffer := bp.GetBuffer()
	defer bp.PutBuffer(renderBuffer)

	// NOTE(review): the render error is deliberately not checked until the
	// bottom; the transform chain still runs on a partial render, but the
	// output is only written when err is nil.
	err := s.render(name, d, renderBuffer, layouts...)

	outBuffer := bp.GetBuffer()
	defer bp.PutBuffer(outBuffer)

	transformLinks := transform.NewEmptyTransforms()

	// One AbsURL transform serves both modes; which rewrite happens is
	// decided by the path prefix computed below.
	if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") {
		transformLinks = append(transformLinks, transform.AbsURL)
	}

	// Inject LiveReload only when the site is being served with watch on.
	if s.Running() && viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
		transformLinks = append(transformLinks, transform.LiveReloadInject)
	}

	var path []byte

	if viper.GetBool("RelativeURLs") {
		// Translate dest to its final output location so the dotted
		// relative prefix (../../ etc.) matches the real file depth.
		translated, err := s.PageTarget().(target.OptionalTranslator).TranslateRelative(dest)
		if err != nil {
			return err
		}
		path = []byte(helpers.GetDottedRelativePath(translated))
	} else if viper.GetBool("CanonifyURLs") {
		s := viper.GetString("BaseURL") // NOTE(review): shadows the receiver; kept as-is
		if !strings.HasSuffix(s, "/") {
			s += "/"
		}
		path = []byte(s)
	}

	transformer := transform.NewChain(transformLinks...)
	// NOTE(review): Apply's error is ignored here.
	transformer.Apply(outBuffer, renderBuffer, path)

	if outBuffer.Len() == 0 {
		jww.WARN.Printf("%q is rendered empty\n", dest)
		if dest == "/" {
			// A blank home page means the site is effectively broken;
			// point the user at the most common cause (no theme set).
			jww.ERROR.Println("=============================================================")
			jww.ERROR.Println("Your rendered home page is blank: /index.html is zero-length")
			jww.ERROR.Println(" * Did you specify a theme on the command-line or in your")
			jww.ERROR.Printf(" %q file? (Current theme: %q)\n", filepath.Base(viper.ConfigFileUsed()), viper.GetString("Theme"))
			if !viper.GetBool("Verbose") {
				jww.ERROR.Println(" * For more debugging information, run \"hugo -v\"")
			}
			jww.ERROR.Println("=============================================================")
		}
	}

	if err == nil {
		if err = s.WriteDestPage(dest, outBuffer); err != nil {
			return err
		}
	}
	return err
}
// StripHTML accepts a string, strips out all HTML tags and returns it. func StripHTML(s string) string { // Shortcut strings with no tags in them if !strings.ContainsAny(s, "<>") { return s } s = stripHTMLReplacer.Replace(s) // Walk through the string removing all tags b := bp.GetBuffer() defer bp.PutBuffer(b) inTag := false for _, r := range s { switch r { case '<': inTag = true case '>': inTag = false default: if !inTag { b.WriteRune(r) } } } return b.String() }
func doReplace(content []byte, matchers []absurlMatcher) []byte { b := bp.GetBuffer() defer bp.PutBuffer(b) var items []item if x := itemSlicePool.Get(); x != nil { items = x.([]item)[:0] defer itemSlicePool.Put(items) } else { items = make([]item, 0, 8) } lexer := &contentlexer{content: content, items: items, prefixLookup: &prefixes{pr: mainPrefixRunes}, matchers: matchers} lexer.runReplacer() for _, token := range lexer.items { switch token.typ { case tText: b.Write(token.val) default: for _, e := range matchers { if token.typ == e.replaceType { b.Write(e.replacement) break } } } } return b.Bytes() }
func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layouts ...string) error { renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) err := s.render(name, d, renderBuffer, layouts...) outBuffer := bp.GetBuffer() defer bp.PutBuffer(outBuffer) transformLinks := transform.NewEmptyTransforms() if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") { transformLinks = append(transformLinks, transform.AbsURL) } if s.Running() && viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") { transformLinks = append(transformLinks, transform.LiveReloadInject) } var path []byte if viper.GetBool("RelativeURLs") { translated, err := s.PageTarget().(target.OptionalTranslator).TranslateRelative(dest) if err != nil { return err } path = []byte(helpers.GetDottedRelativePath(translated)) } else if viper.GetBool("CanonifyURLs") { s := viper.GetString("BaseURL") if !strings.HasSuffix(s, "/") { s += "/" } path = []byte(s) } transformer := transform.NewChain(transformLinks...) transformer.Apply(outBuffer, renderBuffer, path) if err == nil { if err = s.WriteDestPage(dest, outBuffer); err != nil { return err } } return err }
// ReaderToString is the same as ReaderToBytes, but returns a string. func ReaderToString(lines io.Reader) string { if lines == nil { return "" } b := bp.GetBuffer() defer bp.PutBuffer(b) b.ReadFrom(lines) return b.String() }
// ReaderToBytes takes an io.Reader argument, reads from it // and returns bytes. func ReaderToBytes(lines io.Reader) []byte { b := bp.GetBuffer() defer bp.PutBuffer(b) b.ReadFrom(lines) bc := make([]byte, b.Len(), b.Len()) copy(bc, b.Bytes()) return bc }
func renderShortcodeWithPage(tmpl *template.Template, data *ShortcodeWithPage) string { buffer := bp.GetBuffer() defer bp.PutBuffer(buffer) err := tmpl.Execute(buffer, data) if err != nil { jww.ERROR.Println("error processing shortcode", tmpl.Name(), "\n ERR:", err) jww.WARN.Println(data) } return buffer.String() }
func doReplace(content []byte, matchers []absURLMatcher) []byte { b := bp.GetBuffer() defer bp.PutBuffer(b) lexer := &contentlexer{content: content, b: b, prefixLookup: &prefixes{pr: mainPrefixRunes}, matchers: matchers} lexer.replace() return b.Bytes() }
// renderShortcodeWithPage executes the shortcode template against data and
// returns the rendered output. Execution errors are logged, not propagated.
func renderShortcodeWithPage(tmpl *template.Template, data *ShortcodeWithPage) string {
	buffer := bp.GetBuffer()
	defer bp.PutBuffer(buffer)

	// TODO(bep) Refactor/rename this lock strategy
	// NOTE(review): the isInnerShortcodeCache write lock is held for the
	// entire template execution — presumably to serialize access to shared
	// state the template can reach; confirm before narrowing this critical
	// section, as it serializes all shortcode rendering.
	isInnerShortcodeCache.Lock()
	defer isInnerShortcodeCache.Unlock()

	err := tmpl.Execute(buffer, data)
	if err != nil {
		jww.ERROR.Println("error processing shortcode", tmpl.Name(), "\n ERR:", err)
		jww.WARN.Println(data)
	}
	// String() copies, so returning after the deferred PutBuffer is safe.
	return buffer.String()
}
func BenchmarkEmojiKyokomiFprint(b *testing.B) { f := func(in []byte) []byte { buff := bufferpool.GetBuffer() defer bufferpool.PutBuffer(buff) emoji.Fprint(buff, string(in)) bc := make([]byte, buff.Len(), buff.Len()) copy(bc, buff.Bytes()) return bc } doBenchmarkEmoji(b, f) }
func (c *chain) Apply(w io.Writer, r io.Reader) (err error) { buffer := bp.GetBuffer() defer bp.PutBuffer(buffer) buffer.ReadFrom(r) b := buffer.Bytes() for _, tr := range *c { b = tr(b) } buffer.Reset() buffer.Write(b) buffer.WriteTo(w) return }
// Apply reads everything from r and pushes it through each transformer in
// the chain, ping-ponging between two pooled buffers, writing the final
// result to w. p is the path/prefix made available to the transformers.
func (c *chain) Apply(w io.Writer, r io.Reader, p []byte) error {
	b1 := bp.GetBuffer()
	defer bp.PutBuffer(b1)

	b1.ReadFrom(r)

	// Fast path: empty chain — copy the input straight through.
	if len(*c) == 0 {
		b1.WriteTo(w)
		return nil
	}

	b2 := bp.GetBuffer()
	defer bp.PutBuffer(b2)

	// Each transformer reads fb.from and writes fb.to.
	fb := &fromToBuffer{path: p, from: b1, to: b2}

	for i, tr := range *c {
		if i > 0 {
			// Swap roles: the previous round's output becomes this
			// round's input, and the other buffer is reset to receive
			// the new output.
			if fb.from == b1 {
				fb.from = b2
				fb.to = b1
				fb.to.Reset()
			} else {
				fb.from = b1
				fb.to = b2
				fb.to.Reset()
			}
		}
		tr(fb)
	}

	// NOTE(review): ReadFrom/WriteTo errors are silently ignored here.
	fb.to.WriteTo(w)
	return nil
}
func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error { renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n") err := s.render(name, d, renderBuffer, layouts...) absURLInXML, err := transform.AbsURLInXML(viper.GetString("BaseUrl")) if err != nil { return err } outBuffer := bp.GetBuffer() defer bp.PutBuffer(outBuffer) transformer := transform.NewChain(absURLInXML...) transformer.Apply(outBuffer, renderBuffer) if err == nil { err = s.WriteDestFile(dest, outBuffer) } return err }
func (p *Page) saveSourceAs(path string, safe bool) error { b := bp.GetBuffer() defer bp.PutBuffer(b) b.Write(p.Source.Frontmatter) b.Write(p.Source.Content) bc := make([]byte, b.Len(), b.Len()) copy(bc, b.Bytes()) err := p.saveSource(bc, path, safe) if err != nil { return err } return nil }
func (s *Site) RenderRobotsTXT() error { if viper.GetBool("DisableRobotsTXT") { return nil } n := s.NewNode() n.Data["Pages"] = s.Pages rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"} outBuffer := bp.GetBuffer() defer bp.PutBuffer(outBuffer) err := s.render("robots", n, outBuffer, s.appendThemeTemplates(rLayouts)...) if err == nil { err = s.WriteDestFile("robots.txt", outBuffer) } return err }
// StripHTML accepts a string, strips out all HTML tags and returns it.
func StripHTML(s string) string {
	// Shortcut strings with no tags in them
	if !strings.ContainsAny(s, "<>") {
		return s
	}
	s = stripHTMLReplacer.Replace(s)

	// Walk through the string removing all tags
	b := bp.GetBuffer()
	defer bp.PutBuffer(b)

	// inTag: currently between '<' and '>'.
	// isSpace/wasSpace: collapse each run of whitespace outside tags into
	// its first rune only.
	var inTag, isSpace, wasSpace bool
	for _, r := range s {
		if !inTag {
			// Reset per-rune; set again below only if r is whitespace.
			isSpace = false
		}
		switch {
		case r == '<':
			inTag = true
		case r == '>':
			inTag = false
		case unicode.IsSpace(r):
			isSpace = true
			// Fall through so the space rune goes through the same
			// emit check as any other rune.
			fallthrough
		default:
			// Emit unless inside a tag, or this is the second or later
			// rune of a whitespace run.
			if !inTag && (!isSpace || (isSpace && !wasSpace)) {
				b.WriteRune(r)
			}
		}
		// NOTE(review): wasSpace is updated even while inside a tag.
		wasSpace = isSpace
	}
	return b.String()
}
// extractShortcodes lexes stringToParse, replaces each shortcode occurrence
// with a unique placeholder, and returns the rewritten content together with
// a map from placeholder to the parsed shortcode. On a parse error the
// partial result and map are returned along with the error.
func extractShortcodes(stringToParse string, p *Page, t tpl.Template) (string, map[string]shortcode, error) {

	shortCodes := make(map[string]shortcode)

	startIdx := strings.Index(stringToParse, "{{")

	// short cut for docs with no shortcodes
	if startIdx < 0 {
		return stringToParse, shortCodes, nil
	}

	// the parser takes a string;
	// since this is an internal API, it could make sense to use the mutable []byte all the way, but
	// it seems that the time isn't really spent in the byte copy operations, and the impl. gets a lot cleaner
	pt := &pageTokens{lexer: newShortcodeLexer("parse-page", stringToParse, pos(startIdx))}

	id := 1 // incremented id, will be appended onto temp. shortcode placeholders

	result := bp.GetBuffer()
	defer bp.PutBuffer(result)
	//var result bytes.Buffer

	// the parser is guaranteed to return items in proper order or fail, so …
	// … it's safe to keep some "global" state
	var currItem item
	var currShortcode shortcode
	var err error

Loop:
	for {
		currItem = pt.next()

		switch currItem.typ {
		case tText:
			// Plain text between shortcodes passes through unchanged.
			result.WriteString(currItem.val)
		case tLeftDelimScWithMarkup, tLeftDelimScNoMarkup:
			// let extractShortcode handle left delim (will do so recursively)
			pt.backup()
			if currShortcode, err = extractShortcode(pt, p, t); err != nil {
				return result.String(), shortCodes, err
			}

			if currShortcode.params == nil {
				currShortcode.params = make([]string, 0)
			}

			// Swap the shortcode for a placeholder; the caller substitutes
			// the rendered content back in later via the returned map.
			placeHolder := createShortcodePlaceholder(id)
			result.WriteString(placeHolder)
			shortCodes[placeHolder] = currShortcode
			id++
		case tEOF:
			break Loop
		case tError:
			// Report the error with the source file name and line number.
			err := fmt.Errorf("%s:%d: %s", p.BaseFileName(), (p.lineNumRawContentStart() + pt.lexer.lineNum() - 1), currItem)
			currShortcode.err = err
			return result.String(), shortCodes, err
		}
	}

	return result.String(), shortCodes, nil

}
func ExecuteTemplateToHTML(context interface{}, layouts ...string) template.HTML { b := bp.GetBuffer() defer bp.PutBuffer(b) executeTemplate(context, b, layouts...) return template.HTML(b.String()) }
// Replace prefixed shortcode tokens (HUGOSHORTCODE-1, HUGOSHORTCODE-2) with the real content. func replaceShortcodeTokens(source []byte, prefix string, replacements map[string]string) ([]byte, error) { if len(replacements) == 0 { return source, nil } buff := bp.GetBuffer() defer bp.PutBuffer(buff) sourceLen := len(source) start := 0 pre := []byte("{@{@" + prefix) post := []byte("@}@}") pStart := []byte("<p>") pEnd := []byte("</p>") k := bytes.Index(source[start:], pre) for k != -1 { j := start + k postIdx := bytes.Index(source[j:], post) if postIdx < 0 { // this should never happen, but let the caller decide to panic or not return nil, errors.New("illegal state in content; shortcode token missing end delim") } end := j + postIdx + 4 newVal := []byte(replacements[string(source[j:end])]) // Issue #1148: Check for wrapping p-tags <p> if j >= 3 && bytes.Equal(source[j-3:j], pStart) { if (k+4) < sourceLen && bytes.Equal(source[end:end+4], pEnd) { j -= 3 end += 4 } } oldVal := source[j:end] _, err := buff.Write(source[start:j]) if err != nil { return nil, errors.New("buff write failed") } _, err = buff.Write(newVal) if err != nil { return nil, errors.New("buff write failed") } start = j + len(oldVal) k = bytes.Index(source[start:], pre) } _, err := buff.Write(source[start:]) if err != nil { return nil, errors.New("buff write failed") } bc := make([]byte, buff.Len(), buff.Len()) copy(bc, buff.Bytes()) return bc, nil }