Example #1
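A test that builds a fuzzy pattern and checks that matchChunk and MatchItem preserve an item's original text and its transformed tokens. The `slab` variable is assumed to be a package-level allocation buffer defined elsewhere in the test file.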
func TestOrigTextAndTransformed(t *testing.T) {
	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, true, true, []Range{}, Delimiter{}, []rune("jg"))
	tokens := Tokenize(util.RunesToChars([]rune("junegunn")), Delimiter{})
	trans := Transform(tokens, []Range{Range{1, 1}})

	origBytes := []byte("junegunn.choi")
	for _, extended := range []bool{false, true} {
		chunk := Chunk{
			&Item{
				text:        util.RunesToChars([]rune("junegunn")),
				origText:    &origBytes,
				transformed: trans},
		}
		pattern.extended = extended
		matches := pattern.matchChunk(&chunk, nil, slab) // No cache
		if !(matches[0].item.text.ToString() == "junegunn" &&
			string(*matches[0].item.origText) == "junegunn.choi" &&
			reflect.DeepEqual(matches[0].item.transformed, trans)) {
			t.Error("Invalid match result", matches)
		}

		match, offsets, pos := pattern.MatchItem(chunk[0], true, slab)
		if !(match.item.text.ToString() == "junegunn" &&
			string(*match.item.origText) == "junegunn.choi" &&
			offsets[0][0] == 0 && offsets[0][1] == 5 &&
			reflect.DeepEqual(match.item.transformed, trans)) {
			t.Error("Invalid match result", match, offsets, extended)
		}
		if !((*pos)[0] == 4 && (*pos)[1] == 0) {
			t.Error("Invalid pos array", *pos)
		}
	}
}
Example #2
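A Terminal method that renders header lines: each line is passed through extractColor to strip ANSI codes, wrapped in an Item, and printed highlighted as long as it fits within the window height.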
func (t *Terminal) printHeader() {
	if len(t.header) == 0 {
		return
	}
	max := t.window.Height
	var state *ansiState
	for idx, lineStr := range t.header {
		line := idx + 2
		if t.inlineInfo {
			line--
		}
		if line >= max {
			continue
		}
		trimmed, colors, newState := extractColor(lineStr, state, nil)
		state = newState
		item := &Item{
			text:   util.RunesToChars([]rune(trimmed)),
			colors: colors}

		t.move(line, 2, true)
		t.printHighlighted(&Result{item: item},
			tui.AttrRegular, tui.ColHeader, tui.ColDefault, false)
	}
}
Example #3
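Tokenize splits the input in one of three ways: AWK-style on whitespace when no delimiter is given, on a literal string delimiter, or on a regular expression, keeping the trailing delimiter attached to each token.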
// Tokenize tokenizes the given string with the delimiter
func Tokenize(text util.Chars, delimiter Delimiter) []Token {
	if delimiter.str == nil && delimiter.regex == nil {
		// AWK-style (\S+\s*)
		tokens, prefixLength := awkTokenizer(text)
		return withPrefixLengths(tokens, prefixLength)
	}

	if delimiter.str != nil {
		return withPrefixLengths(text.Split(*delimiter.str), 0)
	}

	// FIXME performance
	var tokens []string
	if delimiter.regex != nil {
		str := text.ToString()
		for len(str) > 0 {
			loc := delimiter.regex.FindStringIndex(str)
			if loc == nil {
				loc = []int{0, len(str)}
			}
			last := util.Max(loc[1], 1)
			tokens = append(tokens, str[:last])
			str = str[last:]
		}
	}
	asRunes := make([]util.Chars, len(tokens))
	for i, token := range tokens {
		asRunes[i] = util.RunesToChars([]rune(token))
	}
	return withPrefixLengths(asRunes, 0)
}
Example #4
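A test of regex-based delimiters: the pattern --\* splits the input so that each token ends with a delimiter match.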
func TestDelimiterRegexRegex(t *testing.T) {
	delim := delimiterRegexp("--\\*")
	tokens := Tokenize(util.RunesToChars([]rune("-*--*---**---")), delim)
	if delim.str != nil ||
		tokens[0].text.ToString() != "-*--*" ||
		tokens[1].text.ToString() != "---*" ||
		tokens[2].text.ToString() != "*---" {
		t.Errorf("%s %d", tokens, len(tokens))
	}
}
Example #5
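A test of result ranking: items are scored by the configured sort criteria (score, then length) and ByRelevance orders them accordingly, with the original index breaking ties.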
// Match length, string length, index
func TestResultRank(t *testing.T) {
	// FIXME global
	sortCriteria = []criterion{byScore, byLength}

	strs := [][]rune{[]rune("foo"), []rune("foobar"), []rune("bar"), []rune("baz")}
	item1 := buildResult(&Item{text: util.RunesToChars(strs[0]), index: 1}, []Offset{}, 2, 3)
	if item1.rank.points[0] != math.MaxUint16-2 || // Bonus
		item1.rank.points[1] != 3 || // Length
		item1.rank.points[2] != 0 || // Unused
		item1.rank.points[3] != 0 || // Unused
		item1.item.index != 1 {
		t.Error(item1.rank)
	}
	// Only differ in index
	item2 := buildResult(&Item{text: util.RunesToChars(strs[0])}, []Offset{}, 2, 3)

	items := []*Result{item1, item2}
	sort.Sort(ByRelevance(items))
	if items[0] != item2 || items[1] != item1 {
		t.Error(items)
	}

	items = []*Result{item2, item1, item1, item2}
	sort.Sort(ByRelevance(items))
	if items[0] != item2 || items[1] != item2 ||
		items[2] != item1 || items[3] != item1 {
		t.Error(items, item1, item1.item.index, item2, item2.item.index)
	}

	// Sort by relevance
	item3 := buildResult(&Item{index: 2}, []Offset{Offset{1, 3}, Offset{5, 7}}, 3, 0)
	item4 := buildResult(&Item{index: 2}, []Offset{Offset{1, 2}, Offset{6, 7}}, 4, 0)
	item5 := buildResult(&Item{index: 2}, []Offset{Offset{1, 3}, Offset{5, 7}}, 5, 0)
	item6 := buildResult(&Item{index: 2}, []Offset{Offset{1, 2}, Offset{6, 7}}, 6, 0)
	items = []*Result{item1, item2, item3, item4, item5, item6}
	sort.Sort(ByRelevance(items))
	if !(items[0] == item6 && items[1] == item5 &&
		items[2] == item4 && items[3] == item3 &&
		items[4] == item2 && items[5] == item1) {
		t.Error(items, item1, item2, item3, item4, item5, item6)
	}
}
Example #6
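A test of Transform with --with-nth style ranges, run against both AWK-style tokens and tokens split on a ':' delimiter, checking the merged text and prefix length of each resulting token.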
func TestTransform(t *testing.T) {
	input := "  abc:  def:  ghi:  jkl"
	{
		tokens := Tokenize(util.RunesToChars([]rune(input)), Delimiter{})
		{
			ranges := splitNth("1,2,3")
			tx := Transform(tokens, ranges)
			if string(joinTokens(tx)) != "abc:  def:  ghi:  " {
				t.Errorf("%s", tx)
			}
		}
		{
			ranges := splitNth("1..2,3,2..,1")
			tx := Transform(tokens, ranges)
			if string(joinTokens(tx)) != "abc:  def:  ghi:  def:  ghi:  jklabc:  " ||
				len(tx) != 4 ||
				tx[0].text.ToString() != "abc:  def:  " || tx[0].prefixLength != 2 ||
				tx[1].text.ToString() != "ghi:  " || tx[1].prefixLength != 14 ||
				tx[2].text.ToString() != "def:  ghi:  jkl" || tx[2].prefixLength != 8 ||
				tx[3].text.ToString() != "abc:  " || tx[3].prefixLength != 2 {
				t.Errorf("%s", tx)
			}
		}
	}
	{
		tokens := Tokenize(util.RunesToChars([]rune(input)), delimiterRegexp(":"))
		{
			ranges := splitNth("1..2,3,2..,1")
			tx := Transform(tokens, ranges)
			if string(joinTokens(tx)) != "  abc:  def:  ghi:  def:  ghi:  jkl  abc:" ||
				len(tx) != 4 ||
				tx[0].text.ToString() != "  abc:  def:" || tx[0].prefixLength != 0 ||
				tx[1].text.ToString() != "  ghi:" || tx[1].prefixLength != 12 ||
				tx[2].text.ToString() != "  def:  ghi:  jkl" || tx[2].prefixLength != 6 ||
				tx[3].text.ToString() != "  abc:" || tx[3].prefixLength != 0 {
				t.Errorf("%s", tx)
			}
		}
	}
}
Example #7
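Expands placeholders such as {}, {q}, and field ranges like {1,2} in a command template: each item is tokenized, the selected fields are transformed and joined, trailing delimiters are trimmed, and the result is shell-quoted.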
func replacePlaceholder(template string, stripAnsi bool, delimiter Delimiter, query string, items []*Item) string {
	return placeholder.ReplaceAllStringFunc(template, func(match string) string {
		// Escaped pattern
		if match[0] == '\\' {
			return match[1:]
		}

		// Current query
		if match == "{q}" {
			return quoteEntry(query)
		}

		replacements := make([]string, len(items))

		if match == "{}" {
			for idx, item := range items {
				replacements[idx] = quoteEntry(item.AsString(stripAnsi))
			}
			return strings.Join(replacements, " ")
		}

		tokens := strings.Split(match[1:len(match)-1], ",")
		ranges := make([]Range, len(tokens))
		for idx, s := range tokens {
			r, ok := ParseRange(&s)
			if !ok {
				// Invalid expression, just return the original string in the template
				return match
			}
			ranges[idx] = r
		}

		for idx, item := range items {
			chars := util.RunesToChars([]rune(item.AsString(stripAnsi)))
			tokens := Tokenize(chars, delimiter)
			trans := Transform(tokens, ranges)
			str := string(joinTokens(trans))
			if delimiter.str != nil {
				str = strings.TrimSuffix(str, *delimiter.str)
			} else if delimiter.regex != nil {
				delims := delimiter.regex.FindAllStringIndex(str, -1)
				if len(delims) > 0 && delims[len(delims)-1][1] == len(str) {
					str = str[:delims[len(delims)-1][0]]
				}
			}
			str = strings.TrimSpace(str)
			replacements[idx] = quoteEntry(str)
		}
		return strings.Join(replacements, " ")
	})
}
Example #8
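A test of exact-match terms: a pattern beginning with a quote ('abc) is matched with ExactMatchNaive and the expected start and end offsets are verified.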
func TestExact(t *testing.T) {
	defer clearPatternCache()
	clearPatternCache()
	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, true, true,
		[]Range{}, Delimiter{}, []rune("'abc"))
	res, pos := algo.ExactMatchNaive(
		pattern.caseSensitive, pattern.forward, util.RunesToChars([]rune("aabbcc abc")), pattern.termSets[0][0].text, true, nil)
	if res.Start != 7 || res.End != 10 {
		t.Errorf("%s / %d / %d", pattern.termSets, res.Start, res.End)
	}
	if pos != nil {
		t.Errorf("pos is expected to be nil")
	}
}
Example #9
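A test of Tokenize covering AWK-style splitting, a literal ':' delimiter, and a whitespace regex delimiter, checking each token's text, prefixLength, and trimLength.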
func TestTokenize(t *testing.T) {
	// AWK-style
	input := "  abc:  def:  ghi  "
	tokens := Tokenize(util.RunesToChars([]rune(input)), Delimiter{})
	if tokens[0].text.ToString() != "abc:  " || tokens[0].prefixLength != 2 || tokens[0].trimLength != 4 {
		t.Errorf("%s", tokens)
	}

	// With delimiter
	tokens = Tokenize(util.RunesToChars([]rune(input)), delimiterRegexp(":"))
	if tokens[0].text.ToString() != "  abc:" || tokens[0].prefixLength != 0 || tokens[0].trimLength != 4 {
		t.Errorf("%s", tokens)
	}

	// With delimiter regex
	tokens = Tokenize(util.RunesToChars([]rune(input)), delimiterRegexp("\\s+"))
	if tokens[0].text.ToString() != "  " || tokens[0].prefixLength != 0 || tokens[0].trimLength != 0 ||
		tokens[1].text.ToString() != "abc:  " || tokens[1].prefixLength != 2 || tokens[1].trimLength != 4 ||
		tokens[2].text.ToString() != "def:  " || tokens[2].prefixLength != 8 || tokens[2].trimLength != 4 ||
		tokens[3].text.ToString() != "ghi  " || tokens[3].prefixLength != 14 || tokens[3].trimLength != 3 {
		t.Errorf("%s", tokens)
	}
}
Example #10
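A test of equal-match terms (^AbC$): with smart case, EqualMatch should match only the exact string and report nil positions.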
func TestEqual(t *testing.T) {
	defer clearPatternCache()
	clearPatternCache()
	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, true, true, []Range{}, Delimiter{}, []rune("^AbC$"))

	match := func(str string, sidxExpected int, eidxExpected int) {
		res, pos := algo.EqualMatch(
			pattern.caseSensitive, pattern.forward, util.RunesToChars([]rune(str)), pattern.termSets[0][0].text, true, nil)
		if res.Start != sidxExpected || res.End != eidxExpected {
			t.Errorf("%s / %d / %d", pattern.termSets, res.Start, res.End)
		}
		if pos != nil {
			t.Errorf("pos is expected to be nil")
		}
	}
	match("ABC", -1, -1)
	match("AbC", 0, 3)
}
Example #11
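A test helper that runs a matching algorithm and compares the resulting start index, end index, and score against expected values, deriving the boundaries from the returned positions when they are available.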
func assertMatch(t *testing.T, fun Algo, caseSensitive, forward bool, input, pattern string, sidx int, eidx int, score int) {
	if !caseSensitive {
		pattern = strings.ToLower(pattern)
	}
	res, pos := fun(caseSensitive, forward, util.RunesToChars([]rune(input)), []rune(pattern), true, nil)
	var start, end int
	if pos == nil || len(*pos) == 0 {
		start = res.Start
		end = res.End
	} else {
		sort.Ints(*pos)
		start = (*pos)[0]
		end = (*pos)[len(*pos)-1] + 1
	}
	if start != sidx {
		t.Errorf("Invalid start index: %d (expected: %d, %s / %s)", start, sidx, input, pattern)
	}
	if end != eidx {
		t.Errorf("Invalid end index: %d (expected: %d, %s / %s)", end, eidx, input, pattern)
	}
	if res.Score != score {
		t.Errorf("Invalid score: %d (expected: %d, %s / %s)", res.Score, score, input, pattern)
	}
}
Example #12
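The top-level Run function wires everything together: it sets up ANSI processing, the chunk list, the reader, and the matcher, handles --filter mode separately, and then drives the interactive terminal through an event loop.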
// Run starts fzf
func Run(opts *Options) {
	sort := opts.Sort > 0
	sortCriteria = opts.Criteria

	if opts.Version {
		fmt.Println(version)
		os.Exit(exitOk)
	}

	// Event channel
	eventBox := util.NewEventBox()

	// ANSI code processor
	ansiProcessor := func(data []byte) (util.Chars, *[]ansiOffset) {
		return util.ToChars(data), nil
	}
	ansiProcessorRunes := func(data []rune) (util.Chars, *[]ansiOffset) {
		return util.RunesToChars(data), nil
	}
	if opts.Ansi {
		if opts.Theme != nil {
			var state *ansiState
			ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
				trimmed, offsets, newState := extractColor(string(data), state, nil)
				state = newState
				return util.RunesToChars([]rune(trimmed)), offsets
			}
		} else {
			// When color is disabled but ansi option is given,
			// we simply strip out ANSI codes from the input
			ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
				trimmed, _, _ := extractColor(string(data), nil, nil)
				return util.RunesToChars([]rune(trimmed)), nil
			}
		}
		ansiProcessorRunes = func(data []rune) (util.Chars, *[]ansiOffset) {
			return ansiProcessor([]byte(string(data)))
		}
	}

	// Chunk list
	var chunkList *ChunkList
	header := make([]string, 0, opts.HeaderLines)
	if len(opts.WithNth) == 0 {
		chunkList = NewChunkList(func(data []byte, index int) *Item {
			if len(header) < opts.HeaderLines {
				header = append(header, string(data))
				eventBox.Set(EvtHeader, header)
				return nil
			}
			chars, colors := ansiProcessor(data)
			return &Item{
				index:  int32(index),
				text:   chars,
				colors: colors}
		})
	} else {
		chunkList = NewChunkList(func(data []byte, index int) *Item {
			tokens := Tokenize(util.ToChars(data), opts.Delimiter)
			trans := Transform(tokens, opts.WithNth)
			if len(header) < opts.HeaderLines {
				header = append(header, string(joinTokens(trans)))
				eventBox.Set(EvtHeader, header)
				return nil
			}
			textRunes := joinTokens(trans)
			item := Item{
				index:    int32(index),
				origText: &data,
				colors:   nil}

			trimmed, colors := ansiProcessorRunes(textRunes)
			item.text = trimmed
			item.colors = colors
			return &item
		})
	}

	// Reader
	streamingFilter := opts.Filter != nil && !sort && !opts.Tac && !opts.Sync
	if !streamingFilter {
		reader := Reader{func(data []byte) bool {
			return chunkList.Push(data)
		}, eventBox, opts.ReadZero}
		go reader.ReadSource()
	}

	// Matcher
	forward := true
	for _, cri := range opts.Criteria[1:] {
		if cri == byEnd {
			forward = false
			break
		}
		if cri == byBegin {
			break
		}
	}
	patternBuilder := func(runes []rune) *Pattern {
		return BuildPattern(
			opts.Fuzzy, opts.FuzzyAlgo, opts.Extended, opts.Case, forward,
			opts.Filter == nil, opts.Nth, opts.Delimiter, runes)
	}
	matcher := NewMatcher(patternBuilder, sort, opts.Tac, eventBox)

	// Filtering mode
	if opts.Filter != nil {
		if opts.PrintQuery {
			opts.Printer(*opts.Filter)
		}

		pattern := patternBuilder([]rune(*opts.Filter))

		found := false
		if streamingFilter {
			slab := util.MakeSlab(slab16Size, slab32Size)
			reader := Reader{
				func(runes []byte) bool {
					item := chunkList.trans(runes, 0)
					if item != nil {
						if result, _, _ := pattern.MatchItem(item, false, slab); result != nil {
							opts.Printer(item.text.ToString())
							found = true
						}
					}
					return false
				}, eventBox, opts.ReadZero}
			reader.ReadSource()
		} else {
			eventBox.Unwatch(EvtReadNew)
			eventBox.WaitFor(EvtReadFin)

			snapshot, _ := chunkList.Snapshot()
			merger, _ := matcher.scan(MatchRequest{
				chunks:  snapshot,
				pattern: pattern})
			for i := 0; i < merger.Length(); i++ {
				opts.Printer(merger.Get(i).item.AsString(opts.Ansi))
				found = true
			}
		}
		if found {
			os.Exit(exitOk)
		}
		os.Exit(exitNoMatch)
	}

	// Synchronous search
	if opts.Sync {
		eventBox.Unwatch(EvtReadNew)
		eventBox.WaitFor(EvtReadFin)
	}

	// Go interactive
	go matcher.Loop()

	// Terminal I/O
	terminal := NewTerminal(opts, eventBox)
	deferred := opts.Select1 || opts.Exit0
	go terminal.Loop()
	if !deferred {
		terminal.startChan <- true
	}

	// Event coordination
	reading := true
	ticks := 0
	eventBox.Watch(EvtReadNew)
	for {
		delay := true
		ticks++
		eventBox.Wait(func(events *util.Events) {
			defer events.Clear()
			for evt, value := range *events {
				switch evt {

				case EvtReadNew, EvtReadFin:
					reading = reading && evt == EvtReadNew
					snapshot, count := chunkList.Snapshot()
					terminal.UpdateCount(count, !reading)
					matcher.Reset(snapshot, terminal.Input(), false, !reading, sort)

				case EvtSearchNew:
					switch val := value.(type) {
					case bool:
						sort = val
					}
					snapshot, _ := chunkList.Snapshot()
					matcher.Reset(snapshot, terminal.Input(), true, !reading, sort)
					delay = false

				case EvtSearchProgress:
					switch val := value.(type) {
					case float32:
						terminal.UpdateProgress(val)
					}

				case EvtHeader:
					terminal.UpdateHeader(value.([]string))

				case EvtSearchFin:
					switch val := value.(type) {
					case *Merger:
						if deferred {
							count := val.Length()
							if opts.Select1 && count > 1 || opts.Exit0 && !opts.Select1 && count > 0 {
								deferred = false
								terminal.startChan <- true
							} else if val.final {
								if opts.Exit0 && count == 0 || opts.Select1 && count == 1 {
									if opts.PrintQuery {
										opts.Printer(opts.Query)
									}
									if len(opts.Expect) > 0 {
										opts.Printer("")
									}
									for i := 0; i < count; i++ {
										opts.Printer(val.Get(i).item.AsString(opts.Ansi))
									}
									if count > 0 {
										os.Exit(exitOk)
									}
									os.Exit(exitNoMatch)
								}
								deferred = false
								terminal.startChan <- true
							}
						}
						terminal.UpdateList(val)
					}
				}
			}
		})
		if delay && reading {
			dur := util.DurWithin(
				time.Duration(ticks)*coordinatorDelayStep,
				0, coordinatorDelayMax)
			time.Sleep(dur)
		}
	}
}
Example #13
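Transform builds the tokens requested by --with-nth: each Range selects a single token, a span of tokens, or the whole line, and the selected parts are merged into a new token that keeps the prefix length of its first part.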
// Transform is used to transform the input when --with-nth option is given
func Transform(tokens []Token, withNth []Range) []Token {
	transTokens := make([]Token, len(withNth))
	numTokens := len(tokens)
	for idx, r := range withNth {
		parts := []*util.Chars{}
		minIdx := 0
		if r.begin == r.end {
			idx := r.begin
			if idx == rangeEllipsis {
				chars := util.RunesToChars(joinTokens(tokens))
				parts = append(parts, &chars)
			} else {
				if idx < 0 {
					idx += numTokens + 1
				}
				if idx >= 1 && idx <= numTokens {
					minIdx = idx - 1
					parts = append(parts, tokens[idx-1].text)
				}
			}
		} else {
			var begin, end int
			if r.begin == rangeEllipsis { // ..N
				begin, end = 1, r.end
				if end < 0 {
					end += numTokens + 1
				}
			} else if r.end == rangeEllipsis { // N..
				begin, end = r.begin, numTokens
				if begin < 0 {
					begin += numTokens + 1
				}
			} else {
				begin, end = r.begin, r.end
				if begin < 0 {
					begin += numTokens + 1
				}
				if end < 0 {
					end += numTokens + 1
				}
			}
			minIdx = util.Max(0, begin-1)
			for idx := begin; idx <= end; idx++ {
				if idx >= 1 && idx <= numTokens {
					parts = append(parts, tokens[idx-1].text)
				}
			}
		}
		// Merge multiple parts
		var merged util.Chars
		switch len(parts) {
		case 0:
			merged = util.RunesToChars([]rune{})
		case 1:
			merged = *parts[0]
		default:
			runes := []rune{}
			for _, part := range parts {
				runes = append(runes, part.ToRunes()...)
			}
			merged = util.RunesToChars(runes)
		}

		var prefixLength int32
		if minIdx < numTokens {
			prefixLength = tokens[minIdx].prefixLength
		} else {
			prefixLength = 0
		}
		transTokens[idx] = Token{&merged, prefixLength, int32(merged.TrimLength())}
	}
	return transTokens
}
Example #14
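A small test helper that constructs an Item from a string, keeping the raw bytes as origText and the ANSI-stripped text as the display text.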
func newItem(str string) *Item {
	bytes := []byte(str)
	trimmed, _, _ := extractColor(str, nil, nil)
	return &Item{origText: &bytes, text: util.RunesToChars([]rune(trimmed))}
}