Code Example #1
File: old-tok.go  Project: hachi8833/minima
// TokenizeOld lexes old-style (fully parenthesized) source and returns
// the raw token texts; lexing errors are silently discarded.
func TokenizeOld(source string) []string {
	tokens, _ := lexer.Lex("\n("+source+")", token_exprs)
	toks := []string{}
	for _, v := range tokens {
		toks = append(toks, v.Text)
	}
	return toks
}
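
For orientation, a minimal usage sketch. It is hypothetical: it assumes TokenizeOld is exported from an importable package at github.com/hachi8833/minima, which this listing does not confirm (the file may well be package main).

package main

import (
	"fmt"

	// Hypothetical import path; see the caveat above.
	minima "github.com/hachi8833/minima"
)

func main() {
	// Old-style source is already fully parenthesized; TokenizeOld wraps it
	// in "\n(" ... ")" before lexing and returns the raw token texts.
	fmt.Println(minima.TokenizeOld("+ 1 2"))
}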
Code Example #2
File: tokenize-parse.go  Project: hachi8833/minima
// Tokenize handles all the new-style rules, rewriting indentation-based
// source into the old, fully parenthesized token stream.
func Tokenize(source string) []string {
	source = clearNewlines(source)
	// Wrap the source so everything sits in one top-level list; lexing
	// errors are silently discarded.
	tokens, _ := lexer.Lex("(\n"+source+"\n)", token_exrps_clear)
	toks := []string{}
	last_ind := 0
	for i := 0; i < len(tokens); i++ {
		v := tokens[i]
		if v.Text == "\n" {
			// Determine the next line's indentation: zero unless the line
			// starts with tabs. The "\n)" wrapper guarantees a token after
			// every newline, so tokens[i+1] stays in range.
			var next_ind int
			if tokens[i+1].Text != "\t" {
				next_ind = 0
			} else {
				next_ind = tokens[i+1].Occ
			}
			// diff < 0 means the next line dedents; 0 means the same level.
			diff := next_ind - last_ind
			last_ind = next_ind
			if len(tokens) != i+1 && i > 0 && tokens[i-1].Text != "(" {
				if diff <= 0 {
					toks = append(toks, ")") // one implicit close
					// plus one more ")" per level of indentation lost
					for j := 0; j < -diff; j++ {
						toks = append(toks, ")")
					}
				}
			}
			// Open a new expression for the next line, unless that line
			// immediately closes one.
			if len(toks) > 0 && len(tokens) > i+2 && tokens[i+2].Text != ")" {
				toks = append(toks, "(")
			}
		} else if v.Text == ";" {
			toks = append(toks, ")")
			toks = append(toks, "(")
		} else if v.Text != "\t" {
			// Tabs only carry indentation; every other token passes through.
			toks = append(toks, v.Text)
		}
	}
	return toks
}
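
To make the indentation rules concrete, another hedged sketch under the same import assumption as above. The exact token stream also depends on clearNewlines and token_exrps_clear, neither of which appears in this listing.

package main

import (
	"fmt"

	// Hypothetical import path, as in the previous sketch.
	minima "github.com/hachi8833/minima"
)

func main() {
	// New-style source: a newline ends the current expression and a tab
	// deepens nesting by one level. Tokenize rewrites both into explicit
	// "(" and ")" tokens so the old-style parser can consume the result.
	src := "define x\n\t+ 1 2"
	fmt.Println(minima.Tokenize(src))
}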