Example #1
// WrapAsEqualer breaks a string into a slice of strings.
// Each string is then converted to a <Token> and then to an <Equaler>.
// The resulting []<Equaler> can then be pumped into the generic core.
// We could just as well create slices of Equalers in the first place,
// but that would lead to a var farTooUglyLiteral =
//   []ls_core.Equaler{ls_core.Equaler(Token("trink")), ls_core.Equaler(Token("nicht"))}
func WrapAsEqualer(s string, sorted bool) []ls_core.Equaler {

	ss := stringspb.SplitByWhitespace(s)
	if sorted {
		sort.Strings(ss)

		// weed out duplicates
		su, prev := make([]string, 0, len(ss)), ""
		for _, v := range ss {
			if v == prev {
				continue
			}
			su = append(su, v)
			prev = v
		}
		ss = su

	}

	ret := make([]ls_core.Equaler, 0, len(ss))
	for _, v := range ss {
		cnv := ls_core.Equaler(Token(v))
		ret = append(ret, cnv)
	}
	return ret
}
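
For orientation, here is a minimal, self-contained sketch of the pattern these examples rely on: a word-level Token that satisfies an Equaler interface, and a generic edit-distance routine consuming []Equaler. The names Equaler and editDistance below are illustrative stand-ins, not the actual ls_core API, which may look different.

package main

import "fmt"

// Equaler is a stand-in for ls_core.Equaler; the real interface may differ.
type Equaler interface {
	Equal(compareTo Equaler) bool
}

// Token is a word-level token, as in the example above.
type Token string

// Equal satisfies the stand-in Equaler interface.
func (t Token) Equal(compareTo Equaler) bool {
	other, ok := compareTo.(Token)
	return ok && t == other
}

// editDistance is a plain Levenshtein distance over []Equaler,
// standing in for whatever the generic core actually exposes.
func editDistance(a, b []Equaler) int {
	prev := make([]int, len(b)+1)
	curr := make([]int, len(b)+1)
	for j := range prev {
		prev[j] = j
	}
	for i := 1; i <= len(a); i++ {
		curr[0] = i
		for j := 1; j <= len(b); j++ {
			cost := 1
			if a[i-1].Equal(b[j-1]) {
				cost = 0
			}
			curr[j] = min3(prev[j]+1, curr[j-1]+1, prev[j-1]+cost)
		}
		prev, curr = curr, prev
	}
	return prev[len(b)]
}

// min3 returns the smallest of three ints.
func min3(a, b, c int) int {
	m := a
	if b < m {
		m = b
	}
	if c < m {
		m = c
	}
	return m
}

func main() {
	a := []Equaler{Token("trink"), Token("nicht")}
	b := []Equaler{Token("trink"), Token("doch"), Token("nicht")}
	fmt.Println(editDistance(a, b)) // 1: one insertion of "doch"
}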
Example #2
// See word.WrapAsEqualer
func WrapAsEqualer(sb []byte, sorted bool) []ls_core.Equaler {

	sbf := bytes.Fields(sb)
	if sorted {
		sort.Sort(sortBoB(sbf))

		// weed out duplicates
		su, prev := make([][]byte, 0, len(sbf)), []byte{}
		for _, v := range sbf {
			if bytes.Equal(v, prev) {
				continue
			}
			su = append(su, v)
			prev = v
		}
		sbf = su

	}

	ret := make([]ls_core.Equaler, 0, len(sbf))
	for _, v := range sbf {
		cnv := ls_core.Equaler(Token(v))
		ret = append(ret, cnv)
	}
	return ret
}
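
Example #2 calls sort.Sort(sortBoB(sbf)) on a sortBoB type that is not shown here. From the call site it is presumably a sort.Interface over [][]byte; the following is a hedged sketch of such a wrapper, inferred from that call site rather than taken from the original package.

package main

import (
	"bytes"
	"fmt"
	"sort"
)

// sortBoB sketches the sort.Interface wrapper over [][]byte that
// sort.Sort(sortBoB(sbf)) implies; the real definition may differ.
type sortBoB [][]byte

func (s sortBoB) Len() int           { return len(s) }
func (s sortBoB) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
func (s sortBoB) Less(i, j int) bool { return bytes.Compare(s[i], s[j]) < 0 }

func main() {
	sbf := bytes.Fields([]byte("trink nicht trink"))
	sort.Sort(sortBoB(sbf))
	fmt.Printf("%s\n", bytes.Join(sbf, []byte(" "))) // nicht trink trink
}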
Example #3
// WrapAsEqualer wraps a slice of tokens into the interface type Equaler,
// since our core implementation requires such slices.
func WrapAsEqualer(sl1 []Token) []ls_core.Equaler {
	var ret []ls_core.Equaler
	for _, v := range sl1 {
		cnv := ls_core.Equaler(v)
		ret = append(ret, cnv)
	}
	return ret
}
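
The element-by-element copy in Example #3 is needed because Go does not convert []Token to []Equaler implicitly, even though Token satisfies the interface; the two slice types have different memory layouts. A small self-contained illustration, again using stand-ins for the ls_core types:

package main

import "fmt"

// Stand-ins for ls_core.Equaler and the word-level Token type.
type Equaler interface {
	Equal(compareTo Equaler) bool
}

type Token string

func (t Token) Equal(compareTo Equaler) bool {
	other, ok := compareTo.(Token)
	return ok && t == other
}

// wrapAsEqualer mirrors Example #3: the per-element copy is required,
// since `var e []Equaler = tokens` would not compile.
func wrapAsEqualer(tokens []Token) []Equaler {
	ret := make([]Equaler, 0, len(tokens))
	for _, t := range tokens {
		ret = append(ret, t)
	}
	return ret
}

func main() {
	tokens := []Token{"trink", "nicht"}
	eq := wrapAsEqualer(tokens)
	fmt.Println(len(eq), eq[0].Equal(Token("trink"))) // 2 true
}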