Example #1
func reindexAll() {
	reindexObjs := make([]indexer.Indexable, 0, 100)
	// Clear the index *before* fetching the objects to reindex: anything
	// created between the fetch and the end of the reindex run will be
	// added to the index through the normal path anyway.
	indexer.ClearIndex()

	for _, v := range client.AllClients() {
		reindexObjs = append(reindexObjs, v)
	}
	for _, v := range node.AllNodes() {
		reindexObjs = append(reindexObjs, v)
	}
	for _, v := range role.AllRoles() {
		reindexObjs = append(reindexObjs, v)
	}
	for _, v := range environment.AllEnvironments() {
		reindexObjs = append(reindexObjs, v)
	}
	// The special "_default" environment is fetched separately; skip it
	// rather than indexing a nil object if it somehow can't be retrieved.
	if defaultEnv, err := environment.Get("_default"); err == nil {
		reindexObjs = append(reindexObjs, defaultEnv)
	}
	// data bags have to be done separately
	dbags := databag.GetList()
	for _, db := range dbags {
		dbag, err := databag.Get(db)
		if err != nil {
			continue
		}
		allDBItems, derr := dbag.AllDBItems()
		if derr != nil {
			logger.Errorf(derr.Error())
			continue
		}
		// Append the data bag items directly; pre-sizing a slice with
		// NumDBItems() and filling it by index risks a mismatch if the
		// counts disagree.
		for _, dbi := range allDBItems {
			reindexObjs = append(reindexObjs, dbi)
		}
	}
	indexer.ReIndex(reindexObjs)
}
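
The key detail in this example is the ordering: the index is cleared before the objects are fetched, so anything created while the rebuild is running is picked up by the normal indexing path instead of being lost. Below is a minimal, self-contained sketch of that same pattern; Indexable, clearIndex, addToIndex, doc, and fetchAll are hypothetical stand-ins for illustration only, not goiardi's actual indexer API.

package main

import "fmt"

// Indexable is a hypothetical stand-in for indexer.Indexable; the real
// interface in goiardi's indexer package has its own method set.
type Indexable interface {
	DocID() string
}

// index is a hypothetical in-memory index used only for this sketch.
var index = make(map[string]Indexable)

func clearIndex() {
	index = make(map[string]Indexable)
}

func addToIndex(o Indexable) {
	index[o.DocID()] = o
}

// doc is a stand-in for clients, nodes, roles, environments, etc.
type doc struct {
	id string
}

func (d doc) DocID() string { return d.id }

// fetchAll stands in for the AllClients/AllNodes/AllRoles/AllEnvironments
// and data bag item fetches.
func fetchAll() []Indexable {
	return []Indexable{doc{id: "node1"}, doc{id: "role1"}, doc{id: "env1"}}
}

func main() {
	// Clear first, *then* fetch: anything created after the clear goes
	// through the normal add path and cannot be lost by the rebuild.
	clearIndex()
	for _, o := range fetchAll() {
		addToIndex(o)
	}
	fmt.Printf("%d objects reindexed\n", len(index))
}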
Example #2
func makeSearchItems() int {
	indexer.Initialize(config.Config)
	/* Gotta populate the search index */
	nodes := make([]*node.Node, 4)
	roles := make([]*role.Role, 4)
	envs := make([]*environment.ChefEnvironment, 4)
	clients := make([]*client.Client, 4)
	dbags := make([]*databag.DataBag, 4)
	gob.Register(new(node.Node))
	gob.Register(new(role.Role))
	gob.Register(new(environment.ChefEnvironment))
	gob.Register(new(client.Client))
	gob.Register(new(databag.DataBag))

	// CircleCI sometimes ends up with an incomplete index, which never
	// happens locally. Possibly the indexer hasn't had a chance to finish
	// indexing yet, so clear and rebuild the index explicitly below.
	reindexObjs := make([]indexer.Indexable, 0, 4*5)
	for i := 0; i < 4; i++ {
		nodes[i], _ = node.New(fmt.Sprintf("node%d", i))
		nodes[i].Default["baz"] = fmt.Sprintf("borb")
		nodes[i].Default["blurg"] = fmt.Sprintf("b%d", i)
		nodes[i].Save()
		roles[i], _ = role.New(fmt.Sprintf("role%d", i))
		roles[i].Save()
		envs[i], _ = environment.New(fmt.Sprintf("env%d", i))
		envs[i].Save()
		clients[i], _ = client.New(fmt.Sprintf("client%d", i))
		clients[i].Save()
		dbags[i], _ = databag.New(fmt.Sprintf("databag%d", i))
		dbags[i].Save()
		dbi := make(map[string]interface{})
		dbi["id"] = fmt.Sprintf("dbi%d", i)
		dbi["foo"] = fmt.Sprintf("dbag_item_%d", i)
		dbags[i].NewDBItem(dbi)
		reindexObjs = append(reindexObjs, nodes[i])
		reindexObjs = append(reindexObjs, roles[i])
		reindexObjs = append(reindexObjs, envs[i])
		reindexObjs = append(reindexObjs, clients[i])
		dbis, _ := dbags[i].AllDBItems()
		for _, d := range dbis {
			reindexObjs = append(reindexObjs, d)
		}
	}
	node1 = nodes[0]
	node2 = nodes[1]
	node3 = nodes[2]
	node4 = nodes[3]
	role1 = roles[0]
	role2 = roles[1]
	role3 = roles[2]
	role4 = roles[3]
	env1 = envs[0]
	env2 = envs[1]
	env3 = envs[2]
	env4 = envs[3]
	client1 = clients[0]
	client2 = clients[1]
	client3 = clients[2]
	client4 = clients[3]
	dbag1 = dbags[0]
	dbag2 = dbags[1]
	dbag3 = dbags[2]
	dbag4 = dbags[3]

	indexer.ClearIndex()
	indexer.ReIndex(reindexObjs)

	/* Make this function return something so the compiler's happy building
	 * the tests. */
	return 1
}
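
The gob.Register calls above are worth noting: they are presumably needed because these objects end up stored behind interface values and serialized with encoding/gob, and gob can only encode a concrete type held in an interface if that type has been registered first. A standalone illustration with a hypothetical payload/wrapper pair (not goiardi types):

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
	"log"
)

// payload is a hypothetical concrete type stored behind an interface,
// standing in for node.Node, role.Role, and friends.
type payload struct {
	Name string
}

// wrapper holds the value as an interface{}, which is exactly the situation
// that requires gob.Register: gob must know the concrete type to encode it.
type wrapper struct {
	Obj interface{}
}

func main() {
	// Without this Register call, Encode fails with a
	// "type not registered for interface" error.
	gob.Register(new(payload))

	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(wrapper{Obj: &payload{Name: "node0"}}); err != nil {
		log.Fatal(err)
	}

	var out wrapper
	if err := gob.NewDecoder(&buf).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("decoded %T with Name=%s\n", out.Obj, out.Obj.(*payload).Name)
}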
Example #3
func reindexHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	reindexResponse := make(map[string]interface{})
	opUser, oerr := actor.GetReqUser(r.Header.Get("X-OPS-USERID"))
	if oerr != nil {
		jsonErrorReport(w, r, oerr.Error(), oerr.Status())
		return
	}
	switch r.Method {
	case "POST":
		if !opUser.IsAdmin() {
			jsonErrorReport(w, r, "You are not allowed to perform that action.", http.StatusForbidden)
			return
		}
		reindexObjs := make([]indexer.Indexable, 0, 100)
		// Clear the index *before* fetching the objects to reindex: anything
		// created between the fetch and the end of the reindex run will be
		// added to the index through the normal path anyway.
		indexer.ClearIndex()

		for _, v := range client.AllClients() {
			reindexObjs = append(reindexObjs, v)
		}
		for _, v := range node.AllNodes() {
			reindexObjs = append(reindexObjs, v)
		}
		for _, v := range role.AllRoles() {
			reindexObjs = append(reindexObjs, v)
		}
		for _, v := range environment.AllEnvironments() {
			reindexObjs = append(reindexObjs, v)
		}
		// The special "_default" environment is fetched separately; skip it
		// rather than indexing a nil object if it somehow can't be retrieved.
		if defaultEnv, err := environment.Get("_default"); err == nil {
			reindexObjs = append(reindexObjs, defaultEnv)
		}
		// data bags have to be done separately
		dbags := databag.GetList()
		for _, db := range dbags {
			dbag, err := databag.Get(db)
			if err != nil {
				continue
			}
			allDBItems, derr := dbag.AllDBItems()
			if derr != nil {
				logger.Errorf(derr.Error())
				continue
			}
			// Append the data bag items directly; pre-sizing a slice
			// with NumDBItems() and filling it by index risks a
			// mismatch if the counts disagree.
			for _, dbi := range allDBItems {
				reindexObjs = append(reindexObjs, dbi)
			}
		}
		indexer.ReIndex(reindexObjs)
		reindexResponse["reindex"] = "OK"
	default:
		jsonErrorReport(w, r, "Method not allowed. If you're trying to do something with a data bag named 'reindex', it's not going to work I'm afraid.", http.StatusMethodNotAllowed)
		return
	}
	enc := json.NewEncoder(w)
	if err := enc.Encode(&reindexResponse); err != nil {
		jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
	}
}
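
A handler like this is straightforward to exercise with net/http/httptest. The sketch below uses fakeReindexHandler, a hypothetical trimmed-down stand-in with the same method check and response shape; the /search/reindex path and the X-OPS-USERID value are placeholders for illustration, not confirmed routes or credentials.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/http/httptest"
)

// fakeReindexHandler mimics the real reindexHandler's shape: POST returns a
// JSON {"reindex": "OK"} body, everything else gets a 405. It does no auth
// lookup and no actual reindexing.
func fakeReindexHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	if r.Method != http.MethodPost {
		http.Error(w, `{"error":"method not allowed"}`, http.StatusMethodNotAllowed)
		return
	}
	resp := map[string]interface{}{"reindex": "OK"}
	if err := json.NewEncoder(w).Encode(&resp); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

func main() {
	// A POST returns the {"reindex": "OK"} body.
	req := httptest.NewRequest(http.MethodPost, "/search/reindex", nil)
	req.Header.Set("X-OPS-USERID", "admin")
	rec := httptest.NewRecorder()
	fakeReindexHandler(rec, req)
	fmt.Println("POST:", rec.Code, rec.Body.String())

	// Any other method is rejected with 405 Method Not Allowed.
	req = httptest.NewRequest(http.MethodGet, "/search/reindex", nil)
	rec = httptest.NewRecorder()
	fakeReindexHandler(rec, req)
	fmt.Println("GET:", rec.Code)
}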