Example 1
// InitEngine registers this node with the master by calling its /v1/_join endpoint.
func (this *DefaultEngine) InitEngine() error {

	joinurl := fmt.Sprintf("http://%v:%v/v1/_join?addr=%v&mport=%v", this.MasterIP, this.MasterPort, this.LocalIP, this.LocalPort)

	this.Logger.Info("[INFO] url %v", joinurl)

	res, err := utils.RequestUrl(joinurl)
	if err != nil {
		// a failed join is only logged; InitEngine still returns nil
		this.Logger.Error("[ERROR] error %v", err)
	} else {
		this.Logger.Info("[INFO] RES %v", string(res))
	}

	return nil
}
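
The join call goes through a small HTTP GET helper. A minimal sketch of what such a helper could look like, assuming the utils.RequestUrl(url string) ([]byte, error) signature implied by the call above (an illustration, not the project's actual utils code):

package utils

import (
	"io/ioutil"
	"net/http"
)

// RequestUrl: hypothetical GET helper matching the assumed signature above;
// a real implementation would likely add timeouts and status-code checks.
func RequestUrl(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return ioutil.ReadAll(resp.Body)
}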
Example 2
// controlThread runs the per-node control loop: it forwards index-add events to the node
// and sends a liveness heartbeat every two seconds.
func (this *Dispatcher) controlThread(netinfo *utils.NodeNetInfo) {

	for {

		select {
		case e1 := <-netinfo.IdxChan:
			// an index was added for this node: notify it via /v1/_heart?type=addindex
			this.Logger.Info("[INFO] indexNodeChan:: %v", e1)
			url := fmt.Sprintf("http://%v:%v/v1/_heart?type=addindex", netinfo.Addr, netinfo.MPort)
			post, e := json.Marshal(e1)
			if e != nil {
				this.Logger.Error("[ERROR] post json error %v", e)
				continue
			}
			res, err := utils.PostRequest(url, post)
			if err != nil {
				this.Logger.Error("[ERROR] error %v", err)
			} else {
				r := make(map[string]interface{})
				if err := json.Unmarshal(res, &r); err != nil {
					this.Logger.Error("[ERROR] json error %v", err)
				} else {
					this.Logger.Info("[INFO] Hearting Check ===> Addr:[%v:%v] Status : [%v]", netinfo.Addr, netinfo.MPort, r["_status"])
				}

			}

		case <-time.After(2 * time.Second):
			// no index events within two seconds: send a plain liveness heartbeat
			//this.Logger.Info("[INFO]  %v", time.Now())
			url := fmt.Sprintf("http://%v:%v/v1/_heart?type=alive", netinfo.Addr, netinfo.MPort)
			res, err := utils.RequestUrl(url)
			if err != nil {
				this.Logger.Error("[ERROR] error %v", err)
			} else {
				r := make(map[string]interface{})
				if err := json.Unmarshal(res, &r); err != nil {
					this.Logger.Error("[ERROR] json error %v", err)
				} else {
					this.Logger.Info("[INFO] Hearting Check ===> Addr:[%v:%v] Status : [%v]", netinfo.Addr, netinfo.MPort, r["_status"])
				}

			}

		}

	}

}
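
The index-add notification is sent with utils.PostRequest. A minimal sketch of such a helper, assuming the PostRequest(url string, body []byte) ([]byte, error) signature implied by the call above (illustrative only, not the project's actual implementation):

package utils

import (
	"bytes"
	"io/ioutil"
	"net/http"
)

// PostRequest: hypothetical JSON POST helper; only the signature is taken from the example.
func PostRequest(url string, body []byte) ([]byte, error) {
	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return ioutil.ReadAll(resp.Body)
}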
Example 3
// pullDetail runs an endless loop that pulls batches of documents for one shard of an index
// from the master and feeds them into the local indexer.
func (this *DefaultEngine) pullDetail(indexname string, shardnum uint64) error {
	//127.0.0.1:9990/v1/_pull?index=testidx&shardnum=2&start=0&lens=100
	start := uint64(0)
	lens := 1000
	idxname := fmt.Sprintf("%v_%v", indexname, shardnum)
	pullcount := 0
	for {
		time.Sleep(time.Second * 5)
		url := fmt.Sprintf("http://%v:%v/v1/_pull?index=%v&shardnum=%v&start=%v&lens=%v", this.MasterIP, this.MasterPort, indexname, shardnum, start, lens)
		this.Logger.Info("[INFO] Pull Detail  %v", url)
		res, err := utils.RequestUrl(url)
		if err != nil {
			this.Logger.Error("[ERROR] error %v", err)
			continue
		} else {
			var detail DetailRes
			err := json.Unmarshal(res, &detail)
			if err != nil || detail.ErrorCode != 0 {
				this.Logger.Error("[ERROR] error %v  errorCode : %v ", err, detail.ErrorCode)
				// after at least three successful pulls, an error or non-zero ErrorCode triggers a merge
				if pullcount >= 3 {
					this.mergeIndex(idxname)
					pullcount = 0
				}

				continue
			}
			pullcount++
			//this.Logger.Info("[INFO] pull Detail : %v", detail)
			this.Logger.Info("[INFO] MaxId %v", detail.MaxId)
			start = detail.MaxId

			for _, doc := range detail.Detail {
				// allocate a fresh map per document: json.Unmarshal merges into a non-nil map,
				// so reusing one map would leak fields from earlier documents
				var document map[string]string
				err := json.Unmarshal([]byte(doc), &document)
				if err != nil {
					continue
				}
				this.addDocument(idxname, document)
			}
			this.sync(idxname)

		}
	}

}
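
pullDetail decodes each response into a DetailRes value. Judging only from how the fields are used above, the struct could be sketched like this (JSON tags and exact types are assumptions, not the project's definition):

// DetailRes: hypothetical shape of the pull response; field names follow their usage in pullDetail.
type DetailRes struct {
	ErrorCode int64    `json:"_errorcode"` // non-zero makes pullDetail retry (and merge after 3+ pulls)
	MaxId     uint64   `json:"max_id"`     // next start offset for the incremental pull
	Detail    []string `json:"detail"`     // each entry is one JSON-encoded document
}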
Example 4
// Search function description : search an index; local shards are queried directly, remaining
// shards are forwarded to the nodes that own them, and the paged result is returned as JSON
// params : method (HTTP method), parms (query parameters), body (request body)
// return : JSON-encoded result string, error
func (this *DefaultEngine) Search(method string, parms map[string]string, body []byte) (string, error) {

	//this.Logger.Info("[INFO] DefaultEngine Search >>>>>>>>")

	startTime := time.Now()
	indexname, hasindex := parms["index"]
	ps, hasps := parms["ps"]
	pg, haspg := parms["pg"]
	req, _ := parms["_req"]
	_, hasforce := parms["_force"]
	shardn, hasshard := parms["_shard"]

	if !hasindex || !haspg || !hasps {
		return "", errors.New(eProcessoParms)
	}

	searchquerys := make([]utils.FSSearchQuery, 0)
	searchfilted := make([]utils.FSSearchFilted, 0)

	for field, value := range parms {
		if field == "cid" || field == "index" || field == "ps" || field == "pg" || field == "_shard" || field == "_force" || field == "_req" {
			continue
		}

		switch field[0] {
		case '-': // positive filter: keep documents whose field equals one of the given values
			value_list := strings.Split(value, ",")
			sf := utils.FSSearchFilted{FieldName: field[1:], Type: utils.FILT_EQ, Range: make([]int64, 0)}
			for _, v := range value_list {

				if valuenum, err := strconv.ParseInt(v, 0, 0); err == nil {
					sf.Range = append(sf.Range, valuenum)
				}
			}
			searchfilted = append(searchfilted, sf)

		case '_': // negative filter: exclude documents whose field equals one of the given values
			value_list := strings.Split(value, ",")
			sf := utils.FSSearchFilted{FieldName: field[1:], Type: utils.FILT_NOT, Range: make([]int64, 0)}
			for _, v := range value_list {

				if valuenum, err := strconv.ParseInt(v, 0, 0); err == nil {
					sf.Range = append(sf.Range, valuenum)
				}
			}
			searchfilted = append(searchfilted, sf)

		default: // search term
			//sf := utils.FSSearchFilted{FieldName: field, Type: utils.FILT_STR_PREFIX, RangeStr: make([]string, 0)}
			terms := utils.GSegmenter.Segment(value, true)
			if len(terms) == 0 {
				return eDefaultEngineNotFound, nil
			}
			//this.Logger.Info("[INFO] SegmentTerms >>>  %v ", terms)
			for _, term := range terms {
				var queryst utils.FSSearchQuery
				queryst.FieldName = field
				queryst.Value = term
				searchquerys = append(searchquerys, queryst)
			}

		}

	}
	nps, err1 := strconv.ParseInt(ps, 0, 0)
	npg, err2 := strconv.ParseInt(pg, 0, 0)
	if err1 != nil || err2 != nil {
		nps = 10
		npg = 1
	}

	if nps <= 0 {
		nps = 10
	}

	if npg <= 0 {
		npg = 1
	}

	var defaultResult DefaultResult
	var lens int64
	if hasforce && hasshard {
		// _force + _shard: answer from the named local shard only (used for forwarded requests)
		idxname := fmt.Sprintf("%v_%v", indexname, shardn)
		docids, _ := this.idxManagers[idxname].SearchDocIds(searchquerys, searchfilted)
		this.Logger.Info("[INFO] RES FORCE LOCAL ::: %v", docids)
		lens = int64(len(docids))
		start := nps * (npg - 1)
		end := nps * npg
		if start >= lens {
			return eDefaultEngineNotFound, nil
		}
		if end >= lens {
			end = lens
		}

		defaultResult.Result = make([]map[string]string, 0)
		for _, docid := range docids[start:end] {
			val, ok := this.idxManagers[idxname].GetDocument(docid.Docid)
			if ok {
				defaultResult.Result = append(defaultResult.Result, val)
			}
		}

		utils.GiveDocIDsChan <- docids
	} else {
		// get the index's shards: search local ones directly, forward the rest to their owners
		if idxnode, ok := this.idxNodes[indexname]; ok {

			for shard := uint64(0); shard < idxnode.ShardNum; shard++ {
				flag := false
				for _, s := range idxnode.Shard {
					if s == shard {
						//indexer.SearchDocIds(searchquerys, searchfilted)
						idxname := fmt.Sprintf("%v_%v", indexname, shard)
						docids, _ := this.idxManagers[idxname].SearchDocIds(searchquerys, searchfilted)

						this.Logger.Info("[INFO] RES LOCAL ::: %v", docids)

						lens = int64(len(docids))
						start := nps * (npg - 1)
						end := nps * npg
						if start >= lens {
							return eDefaultEngineNotFound, nil
						}
						if end >= lens {
							end = lens
						}

						defaultResult.Result = make([]map[string]string, 0)
						for _, docid := range docids[start:end] {
							val, ok := this.idxManagers[idxname].GetDocument(docid.Docid)
							if ok {
								defaultResult.Result = append(defaultResult.Result, val)
							}
						}

						utils.GiveDocIDsChan <- docids

						flag = true
						break
					}

				}

				if !flag {
					// shard is not held locally: forward the original request to the first node
					// that owns it, marked with _force/_shard so it answers from that shard alone
					addr := idxnode.ShardNodes[shard][0]
					url := fmt.Sprintf("http://%v%v&_force=1&_shard=%v", addr, req, shard)
					this.Logger.Info("[INFO] Req  %v", url)
					res, err := utils.RequestUrl(url)
					if err != nil {
						this.Logger.Error("[ERROR] error %v", err)
						continue
					}
					this.Logger.Info("[INFO] RES:: %v", string(res))
				}

			}

		} else {
			return eNoIndexname, nil
		}
	}

	endTime := time.Now()
	defaultResult.CostTime = fmt.Sprintf("%v", endTime.Sub(startTime))
	defaultResult.PageNum = npg
	defaultResult.PageSize = nps
	defaultResult.Status = "Found"
	defaultResult.TotalCount = lens

	r, err := json.Marshal(defaultResult)
	if err != nil {
		return eDefaultEngineNotFound, err
	}

	// zero-copy conversion of the marshalled []byte to string via unsafe
	bh := (*reflect.SliceHeader)(unsafe.Pointer(&r))
	sh := reflect.StringHeader{bh.Data, bh.Len}
	return *(*string)(unsafe.Pointer(&sh)), nil

	/*
		indexer := this.idxManagers[indexname]
		docids, found := indexer.SearchDocIds(searchquerys, searchfilted)

		if !found {
			return eDefaultEngineNotFound, nil
		}

		lens := int64(len(docids))

		start := nps * (npg - 1)
		end := nps * npg

		if start >= lens {
			return eDefaultEngineNotFound, nil
		}

		if end >= lens {
			end = lens
		}

		var defaultResult DefaultResult

		defaultResult.Result = make([]map[string]string, 0)
		for _, docid := range docids[start:end] {
			val, ok := indexer.GetDocument(docid.Docid)
			if ok {

				accid, ok := val["account_id"]
				if !ok {
					this.Logger.Error("[ERROR]  account_id不存在")
					continue
				}

				if accstr, err1 := this.detail.Get(IAccount, fmt.Sprintf("%v.%v", cid, accid)); err1 == nil {

					var v map[string]string
					err := json.Unmarshal([]byte(accstr), &v)
					if err != nil {
						this.Logger.Error("[ERROR] json err %v", err)
					} else {
						val["media_username"] = v["media_username"]
					}

				}

				capid, hcapid := val["media_campaign_id"]
				adgid, hadgid := val["media_adgroup_id"]

				if (indexname == IKeyword || indexname == ICreative) && hadgid && hcapid {
					if capstr, err1 := this.detail.Get(ICampaign, fmt.Sprintf("%v.%v.%v", cid, accid, capid)); err1 == nil {
						if adgstr, err2 := this.detail.Get(IAdgroup, fmt.Sprintf("%v.%v.%v.%v", cid, accid, capid, adgid)); err2 == nil {
							var v map[string]string
							err := json.Unmarshal([]byte(adgstr), &v)
							if err != nil {
								this.Logger.Error("[ERROR] json err %v", err)
							} else {
								val["media_adgroup_name"] = v["media_adgroup_name"]
							}

						}

						var v map[string]string
						err := json.Unmarshal([]byte(capstr), &v)
						if err != nil {
							this.Logger.Error("[ERROR] json err %v", err)
						} else {
							val["media_campaign_name"] = v["media_campaign_name"]
						}

					}

				}

				if (indexname == IAdgroup) && hcapid {
					if capstr, err1 := this.detail.Get(ICampaign, fmt.Sprintf("%v.%v.%v", cid, accid, capid)); err1 == nil {

						var v map[string]string
						err := json.Unmarshal([]byte(capstr), &v)
						if err != nil {
							this.Logger.Error("[ERROR] json err %v", err)
						} else {
							val["media_campaign_name"] = v["media_campaign_name"]
						}

					}

				}

				defaultResult.Result = append(defaultResult.Result, val)
			}
		}
		utils.GiveDocIDsChan <- docids

		endTime := time.Now()
		defaultResult.CostTime = fmt.Sprintf("%v", endTime.Sub(startTime))
		defaultResult.PageNum = npg
		defaultResult.PageSize = nps
		defaultResult.Status = "Found"
		defaultResult.TotalCount = lens

		r, err := json.Marshal(defaultResult)
		if err != nil {
			return eDefaultEngineNotFound, err
		}

		bh := (*reflect.SliceHeader)(unsafe.Pointer(&r))
		sh := reflect.StringHeader{bh.Data, bh.Len}
		return *(*string)(unsafe.Pointer(&sh)), nil
	*/
	return "", nil

}
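
For orientation, a hedged sketch of how a caller might fill the parameter map Search expects; searchExample, the index name and the field values are invented for illustration (assumes fmt is imported):

// searchExample: hypothetical call site for Search; the keys mirror the lookups at the top of the method.
func searchExample(engine *DefaultEngine) {
	parms := map[string]string{
		"index": "testidx", // index name (required)
		"pg":    "1",       // page number (required)
		"ps":    "10",      // page size (required)
		"title": "golang",  // any other key is a query field; a '-' or '_' prefix turns it into a filter
	}
	res, err := engine.Search("GET", parms, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(res)
}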
Example 5
// CrawlSync fetches the given url synchronously and returns the raw page content.
func (this *Spider) CrawlSync(url string) (string, error) {

	content, err := u.RequestUrl(url)
	//this.Logger.Info("[INFO] Crawl :%v , err :%v",url,err)
	return content, err
}
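
A possible usage sketch on top of CrawlSync: fetching several URLs concurrently. crawlAll and the fmt/sync usage are invented for illustration; only CrawlSync comes from the example above.

// crawlAll: hypothetical helper that crawls a list of URLs in parallel via CrawlSync.
func crawlAll(s *Spider, urls []string) {
	var wg sync.WaitGroup
	for _, url := range urls {
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			content, err := s.CrawlSync(u)
			if err != nil {
				fmt.Println("crawl error:", u, err)
				return
			}
			fmt.Println("crawled", u, "length:", len(content))
		}(url)
	}
	wg.Wait()
}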