Example #1
// Analyze classifies every whitespace-separated token of the Lexicon's
// buffer into one of its category stacks: keywords, separators, operators,
// numeric literals, and — by elimination — identifiers.
func (lx *Lexicon) Analyze() {
	tokenizer := strutils.NewStrTokens(lx.buffer.String())
outer:
	for tokenizer.HasMoreTokens() {
		token := tokenizer.NextToken()
		// Exact-match lookups against the known keyword/separator/operator
		// tables; the first match wins and we move on to the next token.
		for _, k := range keywords {
			if k == token {
				lx.keywords.Push(token)
				continue outer
			}
		}
		for _, s := range separators {
			if s == token {
				lx.separators.Push(token)
				continue outer
			}
		}
		for _, o := range operators {
			if o == token {
				lx.operators.Push(token)
				continue outer
			}
		}
		// Numeric literal check. strconv.ParseFloat replaces the pre-Go1
		// strconv.Atof (removed from the standard library); it accepts both
		// integer and floating-point syntax, matching the original intent.
		if _, err := strconv.ParseFloat(token, 64); err == nil {
			lx.literals.Push(token)
			continue outer
		}
		// Anything that matched nothing above is treated as an identifier.
		lx.identifiers.Push(token)
	}
}
Example #2
// AddSpaces rewrites the buffer so that every operator and separator is
// surrounded by single spaces, allowing Analyze to see them as standalone
// tokens. The existing tokens are re-joined with a trailing space each.
func (lx *Lexicon) AddSpaces() {
	tokenizer := strutils.NewStrTokens(lx.buffer.String())
	buf := strutils.NewStringBuffer("")
	for tokenizer.HasMoreTokens() {
		next := strutils.NewStringBuffer(tokenizer.NextToken())
		padDelimiters(next, operators)
		padDelimiters(next, separators)
		buf.Append(next.String() + " ")
	}
	lx.buffer = buf
}

// padDelimiters replaces each occurrence of every delimiter in next with the
// same delimiter wrapped in single spaces (" "+d+" ").
//
// The occurrence count is taken up front because each replacement re-inserts
// the delimiter itself, so rescanning from position zero would loop forever.
//
// Fixes two bugs in the original inline loops: `index :=` shadowed the outer
// search offset (so it never advanced and died every iteration), and the
// index returned by strings.Index on the sliced string is relative, yet was
// passed to Replace as an absolute position. The offset now advances past
// the delimiter plus the two inserted spaces so later matches land at their
// correct absolute positions.
func padDelimiters(next *strutils.StringBuffer, delims []string) {
	for _, d := range delims {
		count := strings.Count(next.String(), d)
		offset := 0
		for i := 0; i < count; i++ {
			rel := strings.Index(next.String()[offset:], d)
			if rel < 0 {
				break // defensive: count said there were more, but be safe
			}
			abs := offset + rel
			// NOTE(review): assumes StringBuffer.Replace(start, end, s)
			// replaces the half-open byte range [start, end) — confirm
			// against the strutils implementation.
			next.Replace(abs, abs+len(d), " "+d+" ")
			offset = abs + len(d) + 2
		}
	}
}