Example #1
func makeSearchItems() int {
	indexer.Initialize(config.Config)
	/* Gotta populate the search index */
	nodes := make([]*node.Node, 4)
	roles := make([]*role.Role, 4)
	envs := make([]*environment.ChefEnvironment, 4)
	clients := make([]*client.Client, 4)
	dbags := make([]*databag.DataBag, 4)
	gob.Register(new(node.Node))
	gob.Register(new(role.Role))
	gob.Register(new(environment.ChefEnvironment))
	gob.Register(new(client.Client))
	gob.Register(new(databag.DataBag))

	for i := 0; i < 4; i++ {
		nodes[i], _ = node.New(fmt.Sprintf("node%d", i))
		nodes[i].Default["baz"] = fmt.Sprintf("borb")
		nodes[i].Default["blurg"] = fmt.Sprintf("b%d", i)
		nodes[i].Save()
		roles[i], _ = role.New(fmt.Sprintf("role%d", i))
		roles[i].Save()
		envs[i], _ = environment.New(fmt.Sprintf("env%d", i))
		envs[i].Save()
		clients[i], _ = client.New(fmt.Sprintf("client%d", i))
		clients[i].Save()
		dbags[i], _ = databag.New(fmt.Sprintf("databag%d", i))
		dbags[i].Save()
		dbi := make(map[string]interface{})
		dbi["id"] = fmt.Sprintf("dbi%d", i)
		dbi["foo"] = fmt.Sprintf("dbag_item_%d", i)
		dbags[i].NewDBItem(dbi)
	}
	node1 = nodes[0]
	node2 = nodes[1]
	node3 = nodes[2]
	node4 = nodes[3]
	role1 = roles[0]
	role2 = roles[1]
	role3 = roles[2]
	role4 = roles[3]
	env1 = envs[0]
	env2 = envs[1]
	env3 = envs[2]
	env4 = envs[3]
	client1 = clients[0]
	client2 = clients[1]
	client3 = clients[2]
	client4 = clients[3]
	dbag1 = dbags[0]
	dbag2 = dbags[1]
	dbag3 = dbags[2]
	dbag4 = dbags[3]

	/* Make this function return something so the compiler's happy building
	 * the tests. */
	return 1
}
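
The int return exists only so the setup can be triggered from a package-level variable declaration in the test file, which runs before any test does. A minimal sketch of that wiring, with a hypothetical variable name:

var searchFixtures = makeSearchItems() // evaluated once, before any Test* function runs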
Example #2
func importAll(fileName string) error {
	fp, err := os.Open(fileName)
	if err != nil {
		return err
	}
	defer fp.Close()
	exportedData := &ExportData{}
	dec := json.NewDecoder(fp)
	if err := dec.Decode(exportedData); err != nil {
		return err
	}

	// What versions of the exported data are supported?
	// At the moment, versions 1.0 and 1.1 are.

	if exportedData.MajorVersion == 1 && (exportedData.MinorVersion == 0 || exportedData.MinorVersion == 1) {
		logger.Infof("Importing data, version %d.%d created on %s", exportedData.MajorVersion, exportedData.MinorVersion, exportedData.CreatedTime)

		// load clients
		logger.Infof("Loading clients")
		for _, v := range exportedData.Data["client"] {
			c, err := client.NewFromJSON(v.(map[string]interface{}))
			if err != nil {
				return err
			}
			c.SetPublicKey(v.(map[string]interface{})["public_key"])
			gerr := c.Save()
			if gerr != nil {
				return gerr
			}
		}

		// load users
		logger.Infof("Loading users")
		for _, v := range exportedData.Data["user"] {
			pwhash, _ := v.(map[string]interface{})["password"].(string)
			v.(map[string]interface{})["password"] = ""
			u, err := user.NewFromJSON(v.(map[string]interface{}))
			if err != nil {
				return err
			}
			u.SetPasswdHash(pwhash)
			u.SetPublicKey(v.(map[string]interface{})["public_key"])
			gerr := u.Save()
			if gerr != nil {
				return gerr
			}
		}

		// load filestore
		logger.Infof("Loading filestore")
		for _, v := range exportedData.Data["filestore"] {
			fileData, err := base64.StdEncoding.DecodeString(v.(map[string]interface{})["Data"].(string))
			if err != nil {
				return err
			}
			fdBuf := bytes.NewBuffer(fileData)
			fdRc := ioutil.NopCloser(fdBuf)
			fs, err := filestore.New(v.(map[string]interface{})["Chksum"].(string), fdRc, int64(fdBuf.Len()))
			if err != nil {
				return err
			}
			if err = fs.Save(); err != nil {
				return err
			}
		}

		// load cookbooks
		logger.Infof("Loading cookbooks")
		for _, v := range exportedData.Data["cookbook"] {
			cb, err := cookbook.New(v.(map[string]interface{})["Name"].(string))
			if err != nil {
				return err
			}
			gerr := cb.Save()
			if gerr != nil {
				return gerr
			}
			for ver, cbvData := range v.(map[string]interface{})["Versions"].(map[string]interface{}) {
				cbvData, cerr := checkAttrs(cbvData.(map[string]interface{}))
				if cerr != nil {
					return cerr
				}
				_, cbverr := cb.NewVersion(ver, cbvData)
				if cbverr != nil {
					return cbverr
				}
			}
		}

		// load data bags
		logger.Infof("Loading data bags")
		for _, v := range exportedData.Data["data_bag"] {
			dbag, err := databag.New(v.(map[string]interface{})["Name"].(string))
			if err != nil {
				return err
			}
			gerr := dbag.Save()
			if gerr != nil {
				return gerr
			}
			for _, dbagData := range v.(map[string]interface{})["DataBagItems"].(map[string]interface{}) {
				_, dbierr := dbag.NewDBItem(dbagData.(map[string]interface{})["raw_data"].(map[string]interface{}))
				if dbierr != nil {
					return dbierr
				}
			}
			gerr = dbag.Save()
			if gerr != nil {
				return gerr
			}
		}
		// load environments
		logger.Infof("Loading environments")
		for _, v := range exportedData.Data["environment"] {
			envData, cerr := checkAttrs(v.(map[string]interface{}))
			if cerr != nil {
				return cerr
			}
			if envData["name"].(string) != "_default" {
				e, err := environment.NewFromJSON(envData)
				if err != nil {
					return err
				}
				gerr := e.Save()
				if gerr != nil {
					return gerr
				}
			}
		}

		// load nodes
		logger.Infof("Loading nodes")
		for _, v := range exportedData.Data["node"] {
			nodeData, cerr := checkAttrs(v.(map[string]interface{}))
			if cerr != nil {
				return cerr
			}
			n, err := node.NewFromJSON(nodeData)
			if err != nil {
				return err
			}
			gerr := n.Save()
			if gerr != nil {
				return gerr
			}
		}

		// load roles
		logger.Infof("Loading roles")
		for _, v := range exportedData.Data["role"] {
			roleData, cerr := checkAttrs(v.(map[string]interface{}))
			if cerr != nil {
				return cerr
			}
			r, err := role.NewFromJSON(roleData)
			if err != nil {
				return err
			}
			gerr := r.Save()
			if gerr != nil {
				return gerr
			}
		}

		// load sandboxes
		logger.Infof("Loading sandboxes")
		for _, v := range exportedData.Data["sandbox"] {
			sbid, _ := v.(map[string]interface{})["Id"].(string)
			sbts, _ := v.(map[string]interface{})["CreationTime"].(string)
			sbcomplete, _ := v.(map[string]interface{})["Completed"].(bool)
			sbck, _ := v.(map[string]interface{})["Checksums"].([]interface{})
			sbTime, err := time.Parse(time.RFC3339, sbts)
			if err != nil {
				return err
			}
			sbChecksums := make([]string, len(sbck))
			for i, c := range sbck {
				sbChecksums[i] = c.(string)
			}
			sbox := &sandbox.Sandbox{ID: sbid, CreationTime: sbTime, Completed: sbcomplete, Checksums: sbChecksums}
			if err = sbox.Save(); err != nil {
				return err
			}
		}

		// load loginfos
		logger.Infof("Loading loginfo")
		for _, v := range exportedData.Data["loginfo"] {
			if err := loginfo.Import(v.(map[string]interface{})); err != nil {
				return err
			}
		}

		// load reports
		logger.Infof("Loading reports")
		for _, o := range exportedData.Data["report"] {
			// handle data exported by a buggy earlier version of
			// the report export
			var nodeName string
			v := o.(map[string]interface{})
			if n, ok := v["node_name"]; ok {
				nodeName = n.(string)
			} else if n, ok := v["nodeName"]; ok {
				nodeName = n.(string)
			}
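			// Reports are imported in two phases: first saved with
			// action "start" to create the record, then updated
			// with action "end" so the stored report ends up in
			// its final state.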
			v["action"] = "start"
			if st, ok := v["start_time"].(string); ok {
				t, err := time.Parse(time.RFC3339, st)
				if err != nil {
					return err
				}
				v["start_time"] = t.Format(report.ReportTimeFormat)
			}
			if et, ok := v["end_time"].(string); ok {
				t, err := time.Parse(time.RFC3339, et)
				if err != nil {
					return err
				}
				v["end_time"] = t.Format(report.ReportTimeFormat)
			}
			r, err := report.NewFromJSON(nodeName, v)
			if err != nil {
				return err
			}
			gerr := r.Save()
			if gerr != nil {
				return gerr
			}
			v["action"] = "end"
			if err := r.UpdateFromJSON(v); err != nil {
				return err
			}
			gerr = r.Save()
			if gerr != nil {
				return gerr
			}
		}

		if exportedData.MinorVersion == 1 {
			// import shovey jobs, shovey runs, run streams, and
			// node statuses
			logger.Infof("Loading node statuses...")
			for _, v := range exportedData.Data["node_status"] {
				ns := v.(map[string]interface{})
				err := node.ImportStatus(ns)
				if err != nil {
					return err
				}
			}
			logger.Infof("Loading shoveys...")
			for _, v := range exportedData.Data["shovey"] {
				s := v.(map[string]interface{})
				err := shovey.ImportShovey(s)
				if err != nil {
					return err
				}
			}
			logger.Infof("Loading shovey runs...")
			for _, v := range exportedData.Data["shovey_run"] {
				s := v.(map[string]interface{})
				err := shovey.ImportShoveyRun(s)
				if err != nil {
					return err
				}

			}
			logger.Infof("Loading shovey run streams...")
			for _, v := range exportedData.Data["shovey_run_stream"] {
				s := v.(map[string]interface{})
				err := shovey.ImportShoveyRunStream(s)
				if err != nil {
					return err
				}
			}
		}

	} else {
		err := fmt.Errorf("goiardi export data version %d.%d is not supported by this version of goiardi", exportedData.MajorVersion, exportedData.MinorVersion)
		return err
	}
	return nil
}
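
The ExportData definition never appears alongside importAll. The sketch below is inferred from the fields the function reads; the concrete field types are assumptions, and the real goiardi struct may differ.

// ExportData as importAll appears to assume it: version numbers, a
// creation timestamp, and a map from object type ("client", "node",
// "report", ...) to the raw decoded JSON objects of that type.
type ExportData struct {
	MajorVersion int
	MinorVersion int
	CreatedTime  string
	Data         map[string][]interface{}
}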
Example #3
func dataHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")

	pathArray := splitPath(r.URL.Path)

	dbResponse := make(map[string]interface{})
	opUser, oerr := actor.GetReqUser(r.Header.Get("X-OPS-USERID"))
	if oerr != nil {
		jsonErrorReport(w, r, oerr.Error(), oerr.Status())
		return
	}

	if len(pathArray) == 1 {
		/* Either a list of data bags, or a POST to create a new one */
		switch r.Method {
		case "GET":
			if opUser.IsValidator() {
				jsonErrorReport(w, r, "You are not allowed to perform this action", http.StatusForbidden)
				return
			}
			/* The list */
			dbList := databag.GetList()
			for _, k := range dbList {
				dbResponse[k] = util.CustomURL(fmt.Sprintf("/data/%s", k))
			}
		case "POST":
			if !opUser.IsAdmin() {
				jsonErrorReport(w, r, "You are not allowed to perform this action", http.StatusForbidden)
				return
			}
			dbData, jerr := parseObjJSON(r.Body)
			if jerr != nil {
				jsonErrorReport(w, r, jerr.Error(), http.StatusBadRequest)
				return
			}
			/* check that the name exists */
			switch t := dbData["name"].(type) {
			case string:
				if t == "" {
					jsonErrorReport(w, r, "Field 'name' missing", http.StatusBadRequest)
					return
				}
			default:
				jsonErrorReport(w, r, "Field 'name' missing", http.StatusBadRequest)
				return
			}
			chefDbag, _ := databag.Get(dbData["name"].(string))
			if chefDbag != nil {
				httperr := fmt.Errorf("Data bag %s already exists.", dbData["name"].(string))
				jsonErrorReport(w, r, httperr.Error(), http.StatusConflict)
				return
			}
			chefDbag, nerr := databag.New(dbData["name"].(string))
			if nerr != nil {
				jsonErrorReport(w, r, nerr.Error(), nerr.Status())
				return
			}
			serr := chefDbag.Save()
			if serr != nil {
				jsonErrorReport(w, r, serr.Error(), http.StatusInternalServerError)
				return
			}
			if lerr := loginfo.LogEvent(opUser, chefDbag, "create"); lerr != nil {
				jsonErrorReport(w, r, lerr.Error(), http.StatusInternalServerError)
				return
			}
			dbResponse["uri"] = util.ObjURL(chefDbag)
			w.WriteHeader(http.StatusCreated)
		default:
			/* The chef-pedant spec wants this response for
			 * some reason. Mix it up, I guess. */
			w.Header().Set("Allow", "GET, POST")
			jsonErrorReport(w, r, "GET, POST", http.StatusMethodNotAllowed)
			return
		}
	} else {
		dbName := pathArray[1]

		/* chef-pedant is unhappy about not reporting the HTTP status
		 * as 404 by fetching the data bag before we see if the method
		 * is allowed, so do a quick check for that here. */
		if (len(pathArray) == 2 && r.Method == "PUT") || (len(pathArray) == 3 && r.Method == "POST") {
			var allowed string
			if len(pathArray) == 2 {
				allowed = "GET, POST, DELETE"
			} else {
				allowed = "GET, PUT, DELETE"
			}
			w.Header().Set("Allow", allowed)
			jsonErrorReport(w, r, "Method not allowed", http.StatusMethodNotAllowed)
			return
		}
		if opUser.IsValidator() || (!opUser.IsAdmin() && r.Method != "GET") {
			jsonErrorReport(w, r, "You are not allowed to perform this action", http.StatusForbidden)
			return
		}
		chefDbag, err := databag.Get(dbName)
		if err != nil {
			var errMsg string
			status := err.Status()
			if r.Method == "POST" {
				/* Posts get a special snowflake message */
				errMsg = fmt.Sprintf("No data bag '%s' could be found. Please create this data bag before adding items to it.", dbName)
			} else {
				if len(pathArray) == 3 {
					/* This is nuts. */
					if r.Method == "DELETE" {
						errMsg = fmt.Sprintf("Cannot load data bag %s item %s", dbName, pathArray[2])
					} else {
						errMsg = fmt.Sprintf("Cannot load data bag item %s for data bag %s", pathArray[2], dbName)
					}
				} else {
					errMsg = err.Error()
				}
			}
			jsonErrorReport(w, r, errMsg, status)
			return
		}
		if len(pathArray) == 2 {
			/* getting list of data bag items and creating data bag
			 * items. */
			switch r.Method {
			case "GET":

				for _, k := range chefDbag.ListDBItems() {
					dbResponse[k] = util.CustomObjURL(chefDbag, k)
				}
			case "DELETE":
				/* The chef API docs don't say anything
				 * about this existing, but it does,
				 * and without it you can't delete data
				 * bags at all. */
				dbResponse["chef_type"] = "data_bag"
				dbResponse["json_class"] = "Chef::DataBag"
				dbResponse["name"] = chefDbag.Name
				err := chefDbag.Delete()
				if err != nil {
					jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
					return
				}
				if lerr := loginfo.LogEvent(opUser, chefDbag, "delete"); lerr != nil {
					jsonErrorReport(w, r, lerr.Error(), http.StatusInternalServerError)
					return
				}
			case "POST":
				rawData := databag.RawDataBagJSON(r.Body)
				dbitem, nerr := chefDbag.NewDBItem(rawData)
				if nerr != nil {
					jsonErrorReport(w, r, nerr.Error(), nerr.Status())
					return
				}
				if lerr := loginfo.LogEvent(opUser, dbitem, "create"); lerr != nil {
					jsonErrorReport(w, r, lerr.Error(), http.StatusInternalServerError)
					return
				}

				/* The data bag return values are all
				 * kinds of weird. Sometimes it sends
				 * just the raw data, sometimes it sends
				 * the whole object, sometimes a special
				 * snowflake version. Ugh. Have to loop
				 * through to avoid updating the pointer
				 * in the cache by just assigning
				 * dbitem.RawData to dbResponse. Urk.
				 */
				for k, v := range dbitem.RawData {
					dbResponse[k] = v
				}
				dbResponse["data_bag"] = dbitem.DataBagName
				dbResponse["chef_type"] = dbitem.ChefType
				w.WriteHeader(http.StatusCreated)
			default:
				w.Header().Set("Allow", "GET, DELETE, POST")
				jsonErrorReport(w, r, "GET, DELETE, POST", http.StatusMethodNotAllowed)
				return
			}
		} else {
			/* getting, editing, and deleting existing data bag items. */
			dbItemName := pathArray[2]
			/* Fetch the item once up front; every method in this
			 * branch 404s if it doesn't exist. */
			dbi, err := chefDbag.GetDBItem(dbItemName)
			if err != nil {
				var httperr string
				if r.Method != "DELETE" {
					httperr = fmt.Sprintf("Cannot load data bag item %s for data bag %s", dbItemName, chefDbag.Name)
				} else {
					httperr = fmt.Sprintf("Cannot load data bag %s item %s", chefDbag.Name, dbItemName)
				}
				jsonErrorReport(w, r, httperr, http.StatusNotFound)
				return
			}
			switch r.Method {
			case "GET":
				dbi, err := chefDbag.GetDBItem(dbItemName)
				if err != nil {
					jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
					return
				}
				dbResponse = dbi.RawData
			case "DELETE":
				/* Gotta short circuit this */
				enc := json.NewEncoder(w)
				if err := enc.Encode(&dbi); err != nil {
					jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
					return
				}
				err = chefDbag.DeleteDBItem(dbItemName)
				if err != nil {
					jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
					return
				}
				if lerr := loginfo.LogEvent(opUser, dbi, "delete"); lerr != nil {
					jsonErrorReport(w, r, lerr.Error(), http.StatusInternalServerError)
					return
				}
				return
			case "PUT":
				rawData := databag.RawDataBagJSON(r.Body)
				if rawID, ok := rawData["id"]; ok {
					switch rawID := rawID.(type) {
					case string:
						if rawID != dbItemName {
							jsonErrorReport(w, r, "DataBagItem name mismatch.", http.StatusBadRequest)
							return
						}
					default:
						jsonErrorReport(w, r, "Bad request", http.StatusBadRequest)
						return
					}
				}
				dbitem, err := chefDbag.UpdateDBItem(dbItemName, rawData)
				if err != nil {
					jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
					return
				}
				if lerr := loginfo.LogEvent(opUser, dbitem, "modify"); lerr != nil {
					jsonErrorReport(w, r, lerr.Error(), http.StatusInternalServerError)
					return
				}
				/* Another weird data bag item response
				 * which isn't at all unusual. */
				for k, v := range dbitem.RawData {
					dbResponse[k] = v
				}
				dbResponse["data_bag"] = dbitem.DataBagName
				dbResponse["chef_type"] = dbitem.ChefType
				dbResponse["id"] = dbItemName
			default:
				w.Header().Set("Allow", "GET, DELETE, PUT")
				jsonErrorReport(w, r, "GET, DELETE, PUT", http.StatusMethodNotAllowed)
				return
			}
		}
	}

	enc := json.NewEncoder(w)
	if err := enc.Encode(&dbResponse); err != nil {
		jsonErrorReport(w, r, err.Error(), http.StatusInternalServerError)
	}
}
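
dataHandler depends on splitPath and on being mounted under /data, neither of which is shown above. The sketch below is a guess at both, not goiardi's actual routing code, and assumes the dataHandler defined above.

// splitPath is assumed to behave roughly like this, so that
// "/data/users/alice" yields ["data", "users", "alice"] and len(pathArray)
// distinguishes the bag list (1), a named bag (2), and a bag item (3).
func splitPath(p string) []string {
	return strings.Split(strings.Trim(p, "/"), "/")
}

func mountDataRoutes(mux *http.ServeMux) {
	// The trailing slash lets /data/<bag> and /data/<bag>/<item> reach
	// the same handler.
	mux.HandleFunc("/data", dataHandler)
	mux.HandleFunc("/data/", dataHandler)
}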
Example #4
func makeSearchItems() int {
	indexer.Initialize(config.Config)
	/* Gotta populate the search index */
	nodes := make([]*node.Node, 4)
	roles := make([]*role.Role, 4)
	envs := make([]*environment.ChefEnvironment, 4)
	clients := make([]*client.Client, 4)
	dbags := make([]*databag.DataBag, 4)
	gob.Register(new(node.Node))
	gob.Register(new(role.Role))
	gob.Register(new(environment.ChefEnvironment))
	gob.Register(new(client.Client))
	gob.Register(new(databag.DataBag))

	// CircleCI sometimes ends up with an incomplete search index, even
	// though this never happens locally; possibly the indexer hasn't had
	// a chance to finish by the time the tests run. Rebuilding the index
	// from scratch below works around it.
	reindexObjs := make([]indexer.Indexable, 0, 4*5)
	for i := 0; i < 4; i++ {
		nodes[i], _ = node.New(fmt.Sprintf("node%d", i))
		nodes[i].Default["baz"] = fmt.Sprintf("borb")
		nodes[i].Default["blurg"] = fmt.Sprintf("b%d", i)
		nodes[i].Save()
		roles[i], _ = role.New(fmt.Sprintf("role%d", i))
		roles[i].Save()
		envs[i], _ = environment.New(fmt.Sprintf("env%d", i))
		envs[i].Save()
		clients[i], _ = client.New(fmt.Sprintf("client%d", i))
		clients[i].Save()
		dbags[i], _ = databag.New(fmt.Sprintf("databag%d", i))
		dbags[i].Save()
		dbi := make(map[string]interface{})
		dbi["id"] = fmt.Sprintf("dbi%d", i)
		dbi["foo"] = fmt.Sprintf("dbag_item_%d", i)
		dbags[i].NewDBItem(dbi)
		reindexObjs = append(reindexObjs, nodes[i], roles[i], envs[i], clients[i])
		dbis, _ := dbags[i].AllDBItems()
		for _, d := range dbis {
			reindexObjs = append(reindexObjs, d)
		}
	}
	node1 = nodes[0]
	node2 = nodes[1]
	node3 = nodes[2]
	node4 = nodes[3]
	role1 = roles[0]
	role2 = roles[1]
	role3 = roles[2]
	role4 = roles[3]
	env1 = envs[0]
	env2 = envs[1]
	env3 = envs[2]
	env4 = envs[3]
	client1 = clients[0]
	client2 = clients[1]
	client3 = clients[2]
	client4 = clients[3]
	dbag1 = dbags[0]
	dbag2 = dbags[1]
	dbag3 = dbags[2]
	dbag4 = dbags[3]

	indexer.ClearIndex()
	indexer.ReIndex(reindexObjs)

	/* Make this function return something so the compiler's happy building
	 * the tests. */
	return 1
}
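
One non-obvious detail above: the data bag items are appended one at a time because Go will not implicitly convert a slice of a concrete type to a slice of an interface type, so whatever concrete collection AllDBItems presumably returns can't be spread straight into reindexObjs. A standalone illustration with hypothetical names:

type indexable interface{ DocID() string }

type fixture struct{ id string }

func (f fixture) DocID() string { return f.id }

func collectFixtures() []indexable {
	concrete := []fixture{{"a"}, {"b"}}
	var objs []indexable
	// objs = append(objs, concrete...) // does not compile: []fixture is not []indexable
	for _, f := range concrete {
		objs = append(objs, f) // each element converts individually
	}
	return objs
}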