func (p Property) DescriptionInMarkdown() (template.HTML, error) {
	unsafeMarkdown := blackfriday.MarkdownCommon([]byte(p.Description))
	safeMarkdown := bluemonday.UGCPolicy().SanitizeBytes(unsafeMarkdown)
	// todo sanitized markdown
	return template.HTML(safeMarkdown), nil
}
func generateArticleContent(r *app.Request) (string, string, models.Category, []byte) {
	articleJson := articleJsonBody{}
	r.DecodeJsonPayload(&articleJson)

	// params
	markdown := articleJson.Markdown
	title := articleJson.Title
	category_name := articleJson.Category
	unsafe := articleJson.Content

	// find category
	category := models.Category{}
	if category_name != "" {
		category.FindByName(category_name)
		if category.ID == 0 {
			category.Name = category_name
			category.Create()
		}
	}

	// HTML sanitizer
	html := bluemonday.UGCPolicy().SanitizeBytes([]byte(unsafe))

	return title, markdown, category, html
}
func (markdown Markdown) Extract(creativeWork *schema.CreativeWork, path string) error {
	markdownContent, err := ioutil.ReadFile(path)
	if nil != err {
		return err
	}

	unsafe := blackfriday.MarkdownCommon(markdownContent)

	p := bluemonday.UGCPolicy()
	p.RequireNoFollowOnLinks(false)
	p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code")
	html := p.SanitizeBytes(unsafe)

	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(html))
	if nil != err {
		return err
	}

	doc.Find("a[href]").Each(func(i int, s *goquery.Selection) {
		link, _ := s.Attr("href")
		url, _ := url.Parse(link)
		if !url.IsAbs() {
			s.SetAttr("href", strings.Replace(link, ".md", ".jsonld", 1))
		}
	})

	creativeWork.Name = doc.Find("h1").Text()
	creativeWork.Text, err = doc.Find("body").Html()
	if nil != err {
		return err
	}

	return nil
}
func main() {
	// Define a policy; we are using the UGC policy as a base.
	p := bluemonday.UGCPolicy()

	// Add "rel=nofollow" to links
	p.RequireNoFollowOnLinks(true)
	p.RequireNoFollowOnFullyQualifiedLinks(true)

	// Open external links in a new window/tab
	p.AddTargetBlankToFullyQualifiedLinks(true)

	// Read input from stdin so that this is a nice unix utility and can receive
	// piped input
	dirty, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		log.Fatal(err)
	}

	// Apply the policy and write to stdout
	fmt.Fprint(
		os.Stdout,
		p.Sanitize(
			string(dirty),
		),
	)
}
func readPageAsHtml(docName, PageFilePath string) ([]byte, error) {
	data, err := ioutil.ReadFile(PageFilePath)
	if err != nil {
		return nil, err
	}

	unsafe := blackfriday.MarkdownCommon(data)

	// TODO: It could be possible to sanitize content both before and after
	// rendering the wiki-text tags. The post wiki-text sanitising would
	// be slightly looser and allow html class attributes.
	unsafe = kaufmann.RenderWikiText(docName, unsafe)

	p := bluemonday.UGCPolicy()
	p.AllowAttrs("class").Matching(bluemonday.SpaceSeparatedTokens).Globally()

	// NOTE: At the moment we are allowing anything to be placed in a data attribute.
	// We could add a regex to limit the value to valid and safe(!) characters,
	// but we would have to write that regex ourselves; I can't see anything suitable
	// in the bluemonday code.
	// Related: http://stackoverflow.com/q/25897910/395461
	p.AllowAttrs("data-pageid").Globally()
	p.AllowAttrs("data-filename").Globally()

	html := p.SanitizeBytes(unsafe)
	return html, nil
}
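The NOTE above suggests constraining the data-attribute values with a regex. A minimal sketch of what that could look like, assuming numeric page IDs and slug-like file names (both assumptions, not taken from the original code):

func buildDataAttrPolicy() *bluemonday.Policy {
	// Sketch only: regex-constrained variant of the data attributes allowed above.
	// The character classes are assumptions (numeric page IDs, slug-like filenames).
	p := bluemonday.UGCPolicy()
	p.AllowAttrs("class").Matching(bluemonday.SpaceSeparatedTokens).Globally()
	p.AllowAttrs("data-pageid").
		Matching(regexp.MustCompile(`^[0-9]+$`)).
		Globally()
	p.AllowAttrs("data-filename").
		Matching(regexp.MustCompile(`^[\w.-]+$`)).
		Globally()
	return p
}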
func SanitizeHtml(input []byte) []byte {
	// return blackfriday.MarkdownCommon(input)
	policy := bluemonday.UGCPolicy()
	policy.AllowAttrs("width", "height", "src", "allowfullscreen", "frameborder").
		OnElements("iframe")
	return policy.SanitizeBytes(input)
}
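SanitizeHtml above allows iframes with any src. If the intent is to permit only known embed providers, a sketch along these lines could tighten the policy; the YouTube and Vimeo URL prefixes are illustrative assumptions, not from the original:

func sanitizeHtmlWithTrustedIframes(input []byte) []byte {
	// Sketch only: restrict iframe src to assumed trusted embed hosts.
	policy := bluemonday.UGCPolicy()
	policy.AllowAttrs("width", "height", "allowfullscreen", "frameborder").
		OnElements("iframe")
	policy.AllowAttrs("src").
		Matching(regexp.MustCompile(`^https://(www\.youtube\.com/embed/|player\.vimeo\.com/video/)`)).
		OnElements("iframe")
	return policy.SanitizeBytes(input)
}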
// Markdown renders GitHub Flavored Markdown text.
func Markdown(text []byte) []byte {
	htmlFlags := 0
	renderer := &renderer{Html: blackfriday.HtmlRenderer(htmlFlags, "", "").(*blackfriday.Html)}

	// Parser extensions for GitHub Flavored Markdown.
	extensions := 0
	extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
	extensions |= blackfriday.EXTENSION_TABLES
	extensions |= blackfriday.EXTENSION_FENCED_CODE
	extensions |= blackfriday.EXTENSION_AUTOLINK
	extensions |= blackfriday.EXTENSION_STRIKETHROUGH
	extensions |= blackfriday.EXTENSION_SPACE_HEADERS
	//extensions |= blackfriday.EXTENSION_HARD_LINE_BREAK

	unsanitized := blackfriday.Markdown(text, renderer, extensions)

	// GitHub Flavored Markdown-like sanitization policy.
	p := bluemonday.UGCPolicy()
	p.AllowAttrs("class").Matching(bluemonday.SpaceSeparatedTokens).OnElements("div", "span")
	p.AllowAttrs("class", "name").Matching(bluemonday.SpaceSeparatedTokens).OnElements("a")
	p.AllowAttrs("rel").Matching(regexp.MustCompile(`^nofollow$`)).OnElements("a")
	p.AllowAttrs("aria-hidden").Matching(regexp.MustCompile(`^true$`)).OnElements("a")
	p.AllowDataURIImages()

	return p.SanitizeBytes(unsanitized)
}
// MainHandler shows the main page.
func MainHandler(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
	dataRenderer := data.FromContext(ctx)

	stat, err := os.Stat("news.md")
	var time time.Time
	if err == nil {
		time = stat.ModTime()
	}

	bte, err := ioutil.ReadFile("news.md")
	markdown := []byte("_Couldn't retrieve the latest news._")
	if err == nil {
		markdown = bte
	}
	output := blackfriday.MarkdownCommon(markdown)

	dataRenderer.Data = map[string]interface{}{
		"Title": "Main",
		"News":  template.HTML(bluemonday.UGCPolicy().SanitizeBytes(output)),
		"Time":  time,
		"Nav":   0,
	}
	dataRenderer.Template = "index"
}
func articleHandler(w http.ResponseWriter, req *http.Request) {
	uri := req.RequestURI
	name := uri[len("/article/"):]

	var selected Article
	for _, article := range gArticles {
		if name == article.name || name+".md" == article.name {
			selected = article
		}
	}
	if selected.path == "" {
		w.WriteHeader(404)
		fmt.Fprintf(w, "Not found")
		return
	}

	data, err := ioutil.ReadFile(selected.path)
	if err != nil {
		w.WriteHeader(500)
		fmt.Fprint(w, err)
		return
	}

	unsafe := blackfriday.MarkdownCommon(data)
	html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)

	w.Header().Add("Content-Type", "text/html")
	w.WriteHeader(200)
	w.Write(html)
}
func Convert(input string) string {
	inputBytes := []byte(input)
	unsafeBytes := blackfriday.MarkdownCommon(inputBytes)
	htmlBytes := bluemonday.UGCPolicy().SanitizeBytes(unsafeBytes)
	return string(htmlBytes)
}
func main() {
	esInput := html.EscapeString(input)
	unsafe := blackfriday.MarkdownCommon([]byte(esInput))
	html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
	fmt.Println(string(html))
}
func renderMarkdown(c *gin.Context, currentText string, title string, versions []versionsInfo) {
	fmt.Println(currentText)
	unsafe := blackfriday.MarkdownCommon([]byte(currentText))
	fmt.Println(string(unsafe))

	pClean := bluemonday.UGCPolicy()
	pClean.AllowElements("img")
	pClean.AllowAttrs("alt").OnElements("img")
	pClean.AllowAttrs("src").OnElements("img")
	pClean.AllowDataURIImages()
	html := pClean.SanitizeBytes(unsafe)

	html2 := string(html)
	r, _ := regexp.Compile("\\$\\$(.*?)\\$\\$")
	for _, s := range r.FindAllString(html2, -1) {
		html2 = strings.Replace(html2, s, "<span class='texp' data-expr='"+s[2:len(s)-2]+"'></span>", 1)
	}
	r, _ = regexp.Compile("\\$(.*?)\\$")
	for _, s := range r.FindAllString(html2, -1) {
		html2 = strings.Replace(html2, s, "<span class='texi' data-expr='"+s[1:len(s)-1]+"'></span>", 1)
	}
	html2 = strings.Replace(html2, "&#36;", "$", -1)

	c.HTML(http.StatusOK, "view.tmpl", gin.H{
		"Title":    title,
		"Body":     template.HTML([]byte(html2)),
		"Versions": versions,
	})
}
func (presenter EntryPresenter) FormattedExcerpt() template.HTML {
	unescaped := html.UnescapeString(presenter.Excerpt)
	p := bluemonday.UGCPolicy()
	sanitized := p.Sanitize(unescaped)
	unescaped = html.UnescapeString(sanitized)
	return template.HTML(unescaped)
}
func (this *TopicController) View() {
	this.Data["IsLogin"] = checkAccount(this.Ctx)
	this.Data["IsTopic"] = true
	this.TplNames = "topic_view.html"

	topic, err := models.GetTopic(this.Ctx.Input.Param("0"))
	if err != nil {
		beego.Error(err)
		this.Redirect("/", 302)
		return
	}

	tid := this.Ctx.Input.Param("0")
	this.Data["Tid"] = tid
	this.Data["Tag"] = strings.Split(topic.Tag, ",")

	topic.Content = string(blackfriday.MarkdownCommon([]byte(topic.Content)))
	this.Data["Topic"] = topic

	replies, err := models.GetAllReplies(tid)
	if err != nil {
		beego.Error(err)
		return
	}
	for _, reply := range replies {
		unsafe := blackfriday.MarkdownCommon([]byte(reply.Content))
		reply.Content = string(bluemonday.UGCPolicy().SanitizeBytes(unsafe))
	}
	this.Data["Replies"] = replies

	this.locale()
}
func renderMarkdown(c *gin.Context, currentText string, title string, versions []versionsInfo, AdminKey string, totalTime time.Duration, encrypted bool, noprompt bool, locked bool, recentlyEdited []string) {
	originalText := currentText
	CodeType := getCodeType(title)
	if CodeType == "markdown" {
		CodeType = ""
	}

	// Rewrite [[wiki links]] as standard Markdown links before rendering.
	r, _ := regexp.Compile("\\[\\[(.*?)\\]\\]")
	for _, s := range r.FindAllString(currentText, -1) {
		currentText = strings.Replace(currentText, s, "["+s[2:len(s)-2]+"](/"+s[2:len(s)-2]+"/view)", 1)
	}

	unsafe := blackfriday.MarkdownCommon([]byte(currentText))

	pClean := bluemonday.UGCPolicy()
	pClean.AllowElements("img")
	pClean.AllowAttrs("alt").OnElements("img")
	pClean.AllowAttrs("src").OnElements("img")
	pClean.AllowAttrs("class").OnElements("a")
	pClean.AllowAttrs("href").OnElements("a")
	pClean.AllowAttrs("id").OnElements("a")
	pClean.AllowDataURIImages()
	html := pClean.SanitizeBytes(unsafe)

	html2 := string(html)
	r, _ = regexp.Compile("\\$\\$(.*?)\\$\\$")
	for _, s := range r.FindAllString(html2, -1) {
		html2 = strings.Replace(html2, s, "<span class='texp' data-expr='"+s[2:len(s)-2]+"'></span>", 1)
	}
	r, _ = regexp.Compile("\\$(.*?)\\$")
	for _, s := range r.FindAllString(html2, -1) {
		html2 = strings.Replace(html2, s, "<span class='texi' data-expr='"+s[1:len(s)-1]+"'></span>", 1)
	}
	html2 = strings.Replace(html2, "&#36;", "$", -1)
	html2 = strings.Replace(html2, "&#91;", "[", -1)
	html2 = strings.Replace(html2, "&#93;", "]", -1)
	html2 = strings.Replace(html2, "&#35;", "#", -1)

	totalTimeString := totalTime.String()
	if totalTime.Seconds() < 1 {
		totalTimeString = "< 1 s"
	}

	if encrypted {
		CodeType = "asciiarmor"
	}

	c.HTML(http.StatusOK, "view.tmpl", gin.H{
		"Title":             title,
		"WikiName":          RuntimeArgs.WikiName,
		"Body":              template.HTML([]byte(html2)),
		"CurrentText":       originalText,
		"Versions":          versions,
		"TotalTime":         totalTimeString,
		"AdminKey":          AdminKey,
		"Encrypted":         encrypted,
		"Locked":            locked,
		"Prompt":            noprompt,
		"LockedOrEncrypted": locked || encrypted,
		"Coding":            len(CodeType) > 0,
		"CodeType":          CodeType,
		"RecentlyEdited":    recentlyEdited,
	})
}
func saveAllDir(db *mgo.Database, path string) {
	list, _ := ioutil.ReadDir(path)
	for _, f := range list {
		if f.IsDir() && strings.HasPrefix(f.Name(), "2014") {
			fmt.Println(f.Name())
			saveAllDir(db, filepath.Join(path, f.Name()))
		} else if strings.HasSuffix(f.Name(), ".md") && (strings.HasPrefix(f.Name(), "2015") || strings.HasPrefix(f.Name(), "2014")) {
			file, err := os.Open(filepath.Join(path, f.Name()))
			if err != nil {
				fmt.Println(err)
				return
			}
			defer file.Close()

			contents, err := ioutil.ReadAll(file)
			if err != nil {
				fmt.Println(err)
				return
			}

			unsafe := blackfriday.MarkdownBasic(contents)
			html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
			// fmt.Println(string(html))

			doc, err := goquery.NewDocumentFromReader(bytes.NewReader(html))
			langs := make(map[string]Repos)
			doc.Find("h4").Each(func(i int, hs *goquery.Selection) {
				var repos Repos
				hs.NextUntil("h4").Find("li").Each(func(i int, s *goquery.Selection) {
					repo := &Repo{}
					repo.URL, _ = s.Find("a").Attr("href")
					repo.Name = strings.Replace(strings.Replace(s.Find("a").Text(), " ", "", -1), "\n", "", -1)
					// repo.Description =
					parts := strings.Split(s.Text(), ":")
					repo.Description = strings.TrimSpace(strings.Replace(strings.Join(parts[1:], ""), "\n", "", -1))
					img, ok := s.Find("img").Attr("src")
					if ok {
						repo.BuiltBy = Contributors{Contributor{
							Avatar: img,
							// Username: "******",
						}}
					}
					repos = append(repos, repo)
				})
				fmt.Println("len:", len(repos))
				langs[strings.Title(hs.Text())] = repos
			})

			_, filename := filepath.Split(file.Name())
			ymd := strings.Split(strings.TrimSuffix(filename, ".md"), "-")
			s := &Snapshot{
				Date:      fmt.Sprintf("%s-%s-%s", ymd[2], ymd[1], ymd[0]),
				Languages: langs,
			}
			s.Save(db)
		}
	}
}
// Markdown renders the content as Markdown and returns the sanitized HTML.
func Markdown(content string) template.HTML {
	// render the post as markdown
	unsafe := blackfriday.MarkdownCommon([]byte(content))
	// sanitize the rendered output
	html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
	// convert to template format
	return template.HTML(html)
}
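A hedged usage sketch of the helper above inside html/template; the template text, function name, and data shape are illustrative assumptions, not from the original:

// Sketch only: feeding the helper's template.HTML into html/template.
// Because the value is already of type template.HTML, it is not re-escaped.
var postTmpl = template.Must(template.New("post").Parse(`<article>{{.Body}}</article>`))

func renderPost(w io.Writer, raw string) error {
	return postTmpl.Execute(w, map[string]interface{}{
		"Body": Markdown(raw),
	})
}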
func BuildSanitizer() (p *bluemonday.Policy) {
	p = bluemonday.UGCPolicy()
	p.AllowAttrs("class").Matching(regexp.MustCompile(`[\p{L}\p{N}\s\-_',:\[\]!\./\\\(\)&]*`)).OnElements("code")
	p.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input")
	p.AllowAttrs("checked", "disabled").OnElements("input")
	return p
}
func getSanitizerPolicy() *bluemonday.Policy {
	p := bluemonday.UGCPolicy()
	p.AllowAttrs("data-plugin").
		Matching(regexp.MustCompile(`[\p{L}\p{N}\s\-_',:\[\]!\./\\\(\)&]*`)).Globally()
	p.AllowAttrs("data-id").
		Matching(regexp.MustCompile(`[\p{L}\p{N}\s\-_',:\[\]!\./\\\(\)&]*`)).Globally()
	return p
}
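Policy builders like BuildSanitizer and getSanitizerPolicy are typically applied to rendered Markdown before the result reaches a template. A minimal sketch, assuming blackfriday output and a hypothetical renderTaskList wrapper (neither is in the originals):

// Sketch only: applying a policy returned by BuildSanitizer to rendered Markdown.
func renderTaskList(md []byte) template.HTML {
	unsafe := blackfriday.MarkdownCommon(md)
	safe := BuildSanitizer().SanitizeBytes(unsafe)
	return template.HTML(safe)
}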
// redirecthomeHandler redirects everyone home ("/") with a 301 redirect.
func redirecthomeHandler(rw http.ResponseWriter, r *http.Request) {
	domain := getDomain(r)
	p := bluemonday.UGCPolicy()
	cleanURL := p.Sanitize(r.URL.Path)
	log.Printf("%q from %s hit %q on domain: %q", r.UserAgent(), r.RemoteAddr, cleanURL, domain)
	http.Redirect(rw, r, "/", 301)
}
func generateHtml(path string) (html []byte, err error) {
	fileBytes, err := ioutil.ReadFile(path)
	if err != nil {
		return
	}
	unsafe := blackfriday.MarkdownCommon(fileBytes)
	html = bluemonday.UGCPolicy().SanitizeBytes(unsafe)
	return
}
func Convert(file string) {
	content, err := ioutil.ReadFile(file)
	if err != nil {
		fmt.Println("ERROR : Cannot read file")
		return
	}
	output := blackfriday.MarkdownCommon(content)
	html := bluemonday.UGCPolicy().SanitizeBytes(output)
	fmt.Println(string(html))
}
func ParseEntry(e dropbox.FileMetadata, c []byte) *Article {
	article := extractEntryData(c)

	unsafe := blackfriday.MarkdownCommon(fixImagePaths(c))
	article.Content = string(bluemonday.UGCPolicy().SanitizeBytes(unsafe))
	article.Summary = ExtractSummary(article.Content)
	article.FileMetadata = e

	article.sanitizeArticleMetadata()
	article.ParseTimeStamp()

	return article
}
// MarkDownToHTML converts Markdown to sanitized HTML wrapped in a <div>.
func MarkDownToHTML(markdown []byte) []byte {
	content := blackfriday.MarkdownCommon(markdown)
	html := bluemonday.UGCPolicy().SanitizeBytes(content)

	prefix := []byte("<div>")
	suffix := []byte("</div>")
	html = append(prefix, html...)
	html = append(html, suffix...)

	return html
}
func (_ markdownRender) Render(rw http.ResponseWriter, code int, data ...interface{}) error {
	rw.Header().Set("Content-Type", ContentHTML+"; charset=utf-8")
	rw.WriteHeader(code)

	input := data[0].([]byte)
	unsafe := blackfriday.MarkdownCommon(input)
	html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)

	_, err := rw.Write(html)
	return err
}
func (_ Test) BlueMonday() {
	p := bluemonday.UGCPolicy()
	html := p.Sanitize(
		`<a onblur="alert(secret)" href="http://www.google.com">Google</a>`,
	)
	// Output:
	// <a href="http://www.google.com" rel="nofollow">Google</a>
	e.InfoLog.Println(html)
}
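The Output comment above follows the convention of Go example tests. A sketch of the same check written as a standard example function that `go test` can verify; the function name is assumed, and the expected output is taken from the comment in the original:

// Sketch only: the same sanitization as a runnable Go example test.
func ExampleUGCPolicy() {
	p := bluemonday.UGCPolicy()
	fmt.Println(p.Sanitize(
		`<a onblur="alert(secret)" href="http://www.google.com">Google</a>`,
	))
	// Output:
	// <a href="http://www.google.com" rel="nofollow">Google</a>
}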
func ToHtml(content string) template.HTML {
	htmlFlags := 0
	htmlFlags |= blackfriday.HTML_USE_SMARTYPANTS
	htmlFlags |= blackfriday.HTML_SMARTYPANTS_FRACTIONS
	renderer := blackfriday.HtmlRenderer(htmlFlags, "", "")

	html := blackfriday.Markdown([]byte(content), renderer, enabled_md_extensions)
	return template.HTML(bluemonday.UGCPolicy().SanitizeBytes(html))
}
func (r Release) SanitisedBody() string {
	var i string
	if r.Body == nil {
		i = ""
	} else {
		i = *r.Body
	}

	s := bluemonday.UGCPolicy()
	b := md.Markdown([]byte(i))
	return string(s.SanitizeBytes(b))
}
func main() {
	filename := "./README.md"
	file, _ := os.OpenFile(filename, os.O_RDONLY, 0644)
	defer file.Close()

	b, _ := ioutil.ReadAll(file)

	unsafe := blackfriday.MarkdownCommon(b)
	html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)

	of, _ := os.OpenFile("readme.html", os.O_CREATE|os.O_RDWR, 0666)
	defer of.Close()
	of.Write(html)
}
func getHtml(markdownFile string) string {
	data, err := ioutil.ReadFile(markdownFile)
	if err != nil {
		fmt.Println("error reading file")
		os.Exit(1)
	}
	unsafe := blackfriday.MarkdownCommon(data)
	html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
	return string(html)
}