Example #1
File: old-tok.go Project: hachi8833/minima
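// TokenizeOld lexes old-style (already parenthesized) source, wrapped in one extra pair of parens, and returns the raw token texts.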
func TokenizeOld(source string) []string {
	tokens, _ := lexer.Lex("\n("+source+")", token_exprs)
	toks := []string{}
	for _, v := range tokens {
		toks = append(toks, v.Text)
	}
	return toks
}
Example #2
// This is where we handle all the new-style rules; we simply transform them into the old style.
func Tokenize(source string) []string {
	source = clearNewlines(source)
	tokens, _ := lexer.Lex("(\n"+source+"\n)", token_exprs_clear)
	toks := []string{}
	last_ind := 0
	for i := 0; i < len(tokens); i++ {
		v := tokens[i]
		if v.Text == "\n" {
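			// Lookahead: an optional "\t" token after a "\n" carries the next line's
			// indentation depth in Occ (presumably its run length). The wrapped input
			// ends in ")", so tokens[i+1] should always be in range here.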
			next_ind := 0
			if tokens[i+1].Text == "\t" {
				next_ind = tokens[i+1].Occ
			}
			diff := next_ind - last_ind
			last_ind = next_ind
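			// Close groups only between lines: not for the very first or last token,
			// and not right after an explicit "(".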
			if len(tokens) != i+1 && i > 0 && tokens[i-1].Text != "(" {
				if diff <= 0 {
					toks = append(toks, ")")     // one implicit closing paren
					for j := 0; j < -diff; j++ { // plus one for each level the indentation drops
						toks = append(toks, ")")
					}
				}
			}
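			// Open a group for the upcoming line, unless a ")" two tokens ahead
			// is about to close it anyway.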
			if len(toks) > 0 && len(tokens) > i+2 && tokens[i+2].Text != ")" {
				toks = append(toks, "(")
			}
		} else if v.Text == ";" {
			toks = append(toks, ")")
			toks = append(toks, "(")
		} else if v.Text != "\t" {
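			// Plain tokens pass through; "\t" tokens are dropped here because their
			// indentation was already read via lookahead in the "\n" branch.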
			toks = append(toks, v.Text)
		}
	}
	return toks
}
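For a concrete picture of what this pass produces, below is a self-contained sketch that replays the same indentation-to-parentheses logic over a hand-built token stream. The Tok type, the meaning of its Occ field (the run length of a collapsed "\t" token), and the sample stream are assumptions made for illustration; the project's actual lexer, token_exprs_clear rules, and clearNewlines are not reproduced here.

package main

import "fmt"

// Tok is a hypothetical stand-in for the lexer's token type.
type Tok struct {
	Text string
	Occ  int // for a "\t" token: the indentation depth (assumed run length)
}

// parenthesize mirrors Tokenize's newline handling: it emits one implicit
// closer on every non-indenting line break, plus one closer per dedent
// level, then opens a new group for the following line.
func parenthesize(tokens []Tok) []string {
	toks := []string{}
	lastInd := 0
	for i := 0; i < len(tokens); i++ {
		v := tokens[i]
		switch {
		case v.Text == "\n":
			nextInd := 0
			if i+1 < len(tokens) && tokens[i+1].Text == "\t" {
				nextInd = tokens[i+1].Occ
			}
			diff := nextInd - lastInd
			lastInd = nextInd
			if i+1 != len(tokens) && i > 0 && tokens[i-1].Text != "(" && diff <= 0 {
				for j := 0; j <= -diff; j++ { // one implicit closer, plus one per dedent level
					toks = append(toks, ")")
				}
			}
			if len(toks) > 0 && len(tokens) > i+2 && tokens[i+2].Text != ")" {
				toks = append(toks, "(")
			}
		case v.Text == ";":
			toks = append(toks, ")", "(")
		case v.Text != "\t":
			toks = append(toks, v.Text)
		}
	}
	return toks
}

func main() {
	// Hand-built stream for the wrapped source "(\n" + "def f x\n\tadd x 1" + "\n)".
	stream := []Tok{
		{Text: "("}, {Text: "\n"},
		{Text: "def"}, {Text: "f"}, {Text: "x"}, {Text: "\n"},
		{Text: "\t", Occ: 1}, {Text: "add"}, {Text: "x"}, {Text: "1"}, {Text: "\n"},
		{Text: ")"},
	}
	fmt.Println(parenthesize(stream))
	// Output: [( ( def f x ( add x 1 ) ) )]
}

The indented body ends up as a nested group inside the def expression, which is exactly the old-style, fully parenthesized shape that TokenizeOld consumes.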