Example #1
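// CheckMarkdownLinks parses the remaining markdown in reader with mmark,
// using a TestRenderer that records each link against the source file.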
func CheckMarkdownLinks(reader *linereader.LineReader, file string) (err error) {
	// mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters)
	htmlFlags := 0
	htmlFlags |= mmark.HTML_FOOTNOTE_RETURN_LINKS

	renderParameters := mmark.HtmlRendererParameters{
	//		FootnoteAnchorPrefix:       viper.GetString("FootnoteAnchorPrefix"),
	//		FootnoteReturnLinkContents: viper.GetString("FootnoteReturnLinkContents"),
	}

	renderer := &TestRenderer{
		LinkFrom: file,
		Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
	}

	extensions := 0 |
		//mmark.EXTENSION_NO_INTRA_EMPHASIS |
		mmark.EXTENSION_TABLES | mmark.EXTENSION_FENCED_CODE |
		mmark.EXTENSION_AUTOLINK |
		//mmark.EXTENSION_STRIKETHROUGH |
		mmark.EXTENSION_SPACE_HEADERS | mmark.EXTENSION_FOOTNOTES |
		mmark.EXTENSION_HEADER_IDS | mmark.EXTENSION_AUTO_HEADER_IDS //|
	//	mmark.EXTENSION_DEFINITION_LISTS

	//var output []byte
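	// read the rest of the file (up to 1 MiB); the LineReader has already
	// consumed any front matter at this point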
	buf := make([]byte, 1024*1024)
	length, err := reader.Read(buf)
	if length == 0 || err != nil {
		return err
	}
	data.VerboseLog("RUNNING Markdown on %s length(%d) - not counting frontmater\n", file, length)
	_ = mmark.Parse(buf, renderer, extensions)
	data.VerboseLog("FINISHED Markdown on %s\n", file)

	return nil
}
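A hypothetical caller for the example above. The linereader.OpenReader constructor and the log-based error handling are assumptions used only for illustration (they are not part of the original code); the sketch assumes the same package and imports plus the standard library log package.

// Hypothetical usage sketch (not from the original repository): iterate a
// set of markdown files and run CheckMarkdownLinks over each one.
func checkAllDocs(files []string) {
	for _, file := range files {
		// linereader.OpenReader is an assumed constructor name.
		reader, err := linereader.OpenReader(file)
		if err != nil {
			log.Printf("open %s: %v", file, err)
			continue
		}
		if err := CheckMarkdownLinks(reader, file); err != nil {
			log.Printf("link check failed for %s: %v", file, err)
		}
	}
}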
Example #2
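// doCheckHugoFrontmatter reads the TOML front matter block ("+++" ... "+++")
// at the top of a Hugo content file, optionally wrapped in an HTML comment,
// stores the key=value pairs in data.AllFiles[file].Meta, and verifies that
// non-draft files define title, description, and keywords.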
func doCheckHugoFrontmatter(reader *linereader.LineReader, file string) (err error) {
	foundComment := false
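	// scan past an optional HTML comment opener and any leading whitespace
	// until the opening "+++" TOML delimiter is found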
	for err == nil {
		byteBuff, _, err := reader.ReadLine()
		if err != nil {
			return err
		}
		buff := string(byteBuff)
		if buff == "+++" {
			data.VerboseLog("Found TOML start")
			break
		}
		if strings.HasPrefix(buff, "<!--") {
			if !strings.HasSuffix(buff, "-->") {
				data.VerboseLog("found comment start")
				foundComment = true
				continue
			}
		}
		//data.VerboseLog("ReadLine: %s, %v, %s\n", string(byteBuff), isPrefix, err)
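		// anything before the delimiter other than whitespace is an error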
		for i := 0; i < len(buff); {
			runeValue, width := utf8.DecodeRuneInString(buff[i:])
			if unicode.IsSpace(runeValue) {
				i += width
			} else {
				data.VerboseLog("Unexpected non-whitespace char: %s", buff)
				return fmt.Errorf("Unexpected non-whitespace char: %s", buff)
			}
		}
	}

	data.AllFiles[file].Meta = make(map[string]string)

	// read lines until `+++` ending
	for err == nil {
		byteBuff, _, err := reader.ReadLine()
		if err != nil {
			return err
		}
		buff := string(byteBuff)
		if buff == "+++" {
			data.VerboseLog("Found TOML end")
			break
		}
		data.VerboseLog("\t%s\n", buff)

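		// split each "key = value" line on the first '=' and store the
		// trimmed pair in this file's metadata map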
		meta := strings.SplitN(buff, "=", 2)
		data.VerboseLog("\t%d\t%v\n", len(meta), meta)
		if len(meta) == 2 {
			data.VerboseLog("\t\t%s: %s\n", meta[0], meta[1])
			data.AllFiles[file].Meta[strings.Trim(meta[0], " ")] = strings.Trim(meta[1], " ")
		}
	}
	// remove trailing close comment
	if foundComment {
		byteBuff, _, err := reader.ReadLine()
		if err != nil {
			return err
		}
		buff := string(byteBuff)
		data.VerboseLog("is this a comment? (%s)\n", buff)
		if strings.HasSuffix(buff, "-->") {
			if !strings.HasPrefix(buff, "<!--") {
				data.VerboseLog("found comment end\n")
				foundComment = false
			}
		}
		if foundComment {
			reader.UnreadLine(buff)
			return fmt.Errorf("Did not find expected close metadata comment")
		}
	}

	// ensure that the minimum metadata keys are set
	// ignore draft files
	if draft, ok := data.AllFiles[file].Meta["draft"]; !ok || draft != "true" {
		if _, ok := data.AllFiles[file].Meta["title"]; !ok {
			return fmt.Errorf("Did not find `title` metadata element")
		}
		if _, ok := data.AllFiles[file].Meta["description"]; !ok {
			return fmt.Errorf("Did not find `description` metadata element")
		}
		if _, ok := data.AllFiles[file].Meta["keywords"]; !ok {
			return fmt.Errorf("Did not find `keywords` metadata element")
		}
	}
	return nil
}
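For reference, a minimal front matter block that the checks above would accept, written here as a Go raw string literal; the field values are invented, but the "+++" delimiters, the optional wrapping HTML comment, and the required title/description/keywords keys follow the function's logic.

// Illustrative only: metadata that passes doCheckHugoFrontmatter. A line
// such as `draft = true` would instead mark the file as a draft and skip
// the title/description/keywords requirement.
const sampleFrontMatter = `<!--
+++
title = "Example page"
description = "One-line summary of the page"
keywords = ["docs", "example"]
+++
-->`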