// Exists allows the caller to check for the existence of a document using HEAD
func Exists(index string, _type string, id string, args map[string]interface{}) (bool, error) {
	var url string
	query, err := api.QueryString(args)
	if err != nil {
		return false, err
	}
	if len(_type) > 0 {
		url = fmt.Sprintf("/%s/%s/%s?fields=_id", index, _type, id)
	} else {
		url = fmt.Sprintf("/%s/%s?fields=_id", index, id)
	}
	req, err := api.ElasticSearchRequest("HEAD", url, query)
	if err != nil {
		return false, err
	}
	httpStatusCode, _, err := req.Do(nil)
	if err != nil {
		return false, err
	}
	return httpStatusCode == http.StatusOK, nil
}
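// Usage sketch (not part of the library): checking whether a document exists
// with the args-based Exists above. The "twitter"/"tweet" index, type, and id
// values are hypothetical; passing nil for args adds no extra query parameters.
func exampleDocExists() {
	found, err := Exists("twitter", "tweet", "1", nil)
	if err != nil {
		log.Println("exists check failed: ", err)
		return
	}
	log.Println("document exists: ", found)
}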
// Exists allows the caller to check for the existence of a document using HEAD
func Exists(pretty bool, index string, _type string, id string) (bool, error) {
	var url string
	var response map[string]interface{}
	if len(_type) > 0 {
		url = fmt.Sprintf("/%s/%s/%s?fields=_id%s", index, _type, id, api.Pretty(pretty))
	} else {
		url = fmt.Sprintf("/%s/%s?fields=_id%s", index, id, api.Pretty(pretty))
	}
	req, err := api.ElasticSearchRequest("HEAD", url)
	if err != nil {
		return false, err
	}
	httpStatusCode, _, err := req.Do(&response)
	if err != nil {
		return false, err
	}
	if httpStatusCode == http.StatusNotFound {
		return false, nil
	}
	return true, nil
}
// ExistsIndex allows the caller to check for the existence of an index or a type using HEAD
func ExistsIndex(pretty bool, index string, _type string) (bool, error) {
	var url string
	if len(_type) > 0 {
		url = fmt.Sprintf("/%s/%s?%s", index, _type, api.Pretty(pretty))
	} else {
		url = fmt.Sprintf("/%s?%s", index, api.Pretty(pretty))
	}
	req, err := api.ElasticSearchRequest("HEAD", url)
	if err != nil {
		return false, err
	}
	httpStatusCode, _, err := req.Do(nil)
	if err != nil {
		return false, err
	}
	return httpStatusCode == http.StatusOK, nil
}
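// Usage sketch (hypothetical, not part of the library): verify an index is
// present before writing to it. The "github" index name is only illustrative.
func exampleIndexExists() bool {
	exists, err := ExistsIndex(false, "github", "")
	if err != nil {
		log.Println("index check failed: ", err)
		return false
	}
	return exists
}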
// LoadTestData loads test data from the github archive (~6700 docs).
func LoadTestData() {
	docCt := 0
	errCt := 0
	indexer := NewBulkIndexer(1)
	indexer.BulkSender = func(buf *bytes.Buffer) error {
		// log.Printf("Sent %d bytes total %d docs sent", buf.Len(), docCt)
		req, err := api.ElasticSearchRequest("POST", "/_bulk", "")
		if err != nil {
			errCt++
			log.Fatalf("ERROR: %v", err)
			return err
		}
		req.SetBody(buf)
		var response map[string]interface{}
		httpStatusCode, _, err := req.Do(&response)
		if err != nil {
			errCt++
			log.Fatalf("ERROR: %v", err)
			return err
		}
		if httpStatusCode != http.StatusOK {
			log.Fatalf("Not 200! %d\n", httpStatusCode)
		}
		return err
	}
	done := make(chan bool)
	indexer.Run(done)

	resp, err := http.Get("http://data.githubarchive.org/2012-12-10-15.json.gz")
	if err != nil || resp == nil {
		panic("Could not download data")
	}
	defer resp.Body.Close()

	gzReader, err := gzip.NewReader(resp.Body)
	if err != nil {
		panic(err)
	}
	defer gzReader.Close()

	r := bufio.NewReader(gzReader)
	var ge GithubEvent
	docsm := make(map[string]bool)
	h := md5.New()
	for {
		line, err := r.ReadBytes('\n')
		if err != nil && err != io.EOF {
			log.Println("FATAL: could not read line? ", err)
		} else if err != nil {
			indexer.Flush()
			break
		}
		if err := json.Unmarshal(line, &ge); err == nil {
			// Derive an id from the running md5 of everything read so far;
			// the cumulative hash is still unique per line.
			h.Write(line)
			id := fmt.Sprintf("%x", h.Sum(nil))
			if _, ok := docsm[id]; ok {
				log.Println("HM, already exists? ", ge.Url)
			}
			docsm[id] = true
			indexer.Index("github", ge.Type, id, "", &ge.Created, line, true)
			docCt++
		} else {
			log.Println("ERROR? ", string(line))
		}
	}
	if errCt != 0 {
		log.Println("FATAL, could not load ", errCt)
	}
	// let's wait a bit to ensure that elasticsearch finishes
	time.Sleep(time.Second * 5)
	if len(docsm) != docCt {
		panic(fmt.Sprintf("Docs didn't match? %d:%d", len(docsm), docCt))
	}
}
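// LoadTestData above assumes a GithubEvent type with at least Type, Url and
// Created fields. A minimal sketch of such a struct follows; the field set and
// JSON tags are assumptions based on the github archive payload, not the
// repository's actual definition.
type GithubEvent struct {
	Type    string    `json:"type"`
	Url     string    `json:"url"`
	Created time.Time `json:"created_at"`
}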