Example #1
// RootSearch represents the search's entry point into the DOM.
// Delegation to the different search methods happens from here.
//
// RootSearch is only executed for the first query in the query chain;
// all subsequent searches are based on an array of tokens produced by previous results.
func (q *Query) RootSearch() *searchutil.Result {
	for {
		tokenType := q.tokenizer.Next()

		if tokenType == html.ErrorToken {
			break
		}

		token := q.tokenizer.Token()

		if q.Match(token) {
			tokenChain := tokenutil.NewChainFromTokenizer(q.tokenizer)
			q.result.Add(tokenChain)
			// as suggested by NewChainFromTokenizer(), search inside the
			// chain for further matches
			q.TokenSearch(tokenChain)
		}
	}

	return q.result
}
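
For readers new to the tokenizer loop that RootSearch is built on, the following is a minimal, self-contained sketch of the same Next()/ErrorToken pattern using only golang.org/x/net/html; the Query, Match and tokenutil pieces from the example above are left out, and the printed start tags merely stand in for a real match.

package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

func main() {
	dom := `<div class="myClass1"><b>myValue1</b></div><span class="myClass2">myValue2</span>`
	tokenizer := html.NewTokenizer(strings.NewReader(dom))

	for {
		tokenType := tokenizer.Next()

		if tokenType == html.ErrorToken {
			// io.EOF (or a real error) ends the document, just as
			// RootSearch breaks out of its loop
			break
		}

		token := tokenizer.Token()

		// stand-in for q.Match(token): report every start tag
		if tokenType == html.StartTagToken {
			fmt.Println("start tag:", token.Data)
		}
	}
}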
Example #2
// getTokenChain builds a clean, prepared token chain for all test methods
func getTokenChain() *tokenutil.Chain {
	// example DOM
	dom := `<div class="myClass1"><b>myValue1</b></div><span class="myClass2">myValue2</span>`
	// the tokenizer takes an io.Reader as its argument
	tokenizer := html.NewTokenizer(strings.NewReader(dom))
	// simulate one Next() call, as the chain expects a "used" tokenizer
	tokenizer.Next()
	// build a new chain from the tokenizer
	chain := tokenutil.NewChainFromTokenizer(tokenizer)

	return chain
}
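
Each test method can then start from the same prepared chain. Below is a minimal test sketch built only on the helper above, using the standard testing package; it assumes nothing about tokenutil.Chain beyond what the example shows, namely that a non-nil *Chain is returned for the prepared DOM.

func TestGetTokenChain(t *testing.T) {
	chain := getTokenChain()

	if chain == nil {
		t.Fatal("expected a prepared token chain for the example DOM, got nil")
	}
}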