func (d *DataSourceController) parseQuery(query dbox.IQuery, queryInfo toolkit.M) (dbox.IQuery, MetaSave) {
	metaSave := MetaSave{}

	if qFrom := queryInfo.Get("from", "").(string); qFrom != "" {
		query = query.From(qFrom)
	}
	if qSelect := queryInfo.Get("select", "").(string); qSelect != "" {
		if qSelect != "*" {
			query = query.Select(strings.Split(qSelect, ",")...)
		}
	}
	if qTakeRaw, qTakeOK := queryInfo["take"]; qTakeOK {
		if qTake, ok := qTakeRaw.(float64); ok {
			query = query.Take(int(qTake))
		}
		if qTake, ok := qTakeRaw.(int); ok {
			query = query.Take(qTake)
		}
	}
	if qSkipRaw, qSkipOK := queryInfo["skip"]; qSkipOK {
		if qSkip, ok := qSkipRaw.(float64); ok {
			query = query.Skip(int(qSkip))
		}
		if qSkip, ok := qSkipRaw.(int); ok {
			query = query.Skip(qSkip)
		}
	}
	if qOrder := queryInfo.Get("order", "").(string); qOrder != "" {
		orderAll := map[string]string{}
		err := json.Unmarshal([]byte(qOrder), &orderAll)
		if err == nil {
			orderString := []string{}
			for key, val := range orderAll {
				orderString = append(orderString, key)
				orderString = append(orderString, val)
			}
			query = query.Order(orderString...)
		}
	}

	if qInsert := queryInfo.Get("insert", "").(string); qInsert != "" {
		metaSave.keyword = "insert"
		metaSave.data = qInsert
		query = query.Insert()
	}
	if qUpdate := queryInfo.Get("update", "").(string); qUpdate != "" {
		metaSave.keyword = "update"
		metaSave.data = qUpdate
		query = query.Update()
	}
	if _, qDeleteOK := queryInfo["delete"]; qDeleteOK {
		metaSave.keyword = "delete"
		query = query.Delete()
	}
	if qCommand := queryInfo.Get("command", "").(string); qCommand != "" {
		command := map[string]interface{}{}
		err := json.Unmarshal([]byte(qCommand), &command)
		if err == nil {
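			// note: only the first command entry is applied; the loop breaks after one key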
			for key, value := range command {
				query = query.Command(key, value)
				break
			}
		}
	}

	if qWhere := queryInfo.Get("where", "").(string); qWhere != "" {
		whereAll := []map[string]interface{}{}
		err := json.Unmarshal([]byte(qWhere), &whereAll)
		if err == nil {
			allFilter := []*dbox.Filter{}

			for _, each := range whereAll {
				where, _ := toolkit.ToM(each)
				filter := d.filterParse(where)
				if filter != nil {
					allFilter = append(allFilter, filter)
				}
			}

			query = query.Where(allFilter...)
		}
	}

	return query, metaSave
}
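A minimal caller sketch for the function above (not from the source): it fills a queryInfo map with the keys parseQuery inspects and executes the resulting select. The runParsedQuery function, the connection value, the collection name, and the cursor handling are assumptions layered on the eaciit dbox/toolkit APIs already used above; the same imports are assumed.
func runParsedQuery(d *DataSourceController, conn dbox.IConnection) error {
	// hypothetical queryInfo using the same keys parseQuery looks for
	queryInfo := toolkit.M{}.
		Set("from", "orders").      // collection/table name (assumed)
		Set("select", "id,amount"). // comma-separated field list
		Set("take", 10)             // limit; int and float64 are both handled

	query, metaSave := d.parseQuery(conn.NewQuery(), queryInfo)
	if metaSave.keyword != "" {
		// insert/update/delete: metaSave.data carries the payload for the caller to execute
		return nil
	}

	// plain select: open a cursor (assuming dbox's Cursor(nil) signature) and close it when done
	cursor, err := query.Cursor(nil)
	if err != nil {
		return err
	}
	defer cursor.Close()
	return nil
}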
Example #2
func (g *GrabService) execService() {
	g.LastGrabStat = false
	go func(g *GrabService) {
		for g.ServiceRunningStat {

			if g.LastGrabStat {
				<-time.After(g.GrabInterval)
			} else {
				<-time.After(g.TimeOutInterval)
			}

			if !g.ServiceRunningStat {
				continue
			}

			g.ErrorNotes = ""
			g.LastGrabExe = time.Now()
			g.NextGrabExe = time.Now().Add(g.GrabInterval)
			g.LastGrabStat = true
			g.Log.AddLog(fmt.Sprintf("[%s] Grab Started %s", g.Name, g.Url), "INFO")
			g.GrabCount += 1

			keySetting := []string{}
			switch g.SourceType {
			case SourceType_HttpHtml, SourceType_HttpJson:
				if e := g.ServGrabber.Grab(nil); e != nil {
					g.ErrorNotes = fmt.Sprintf("[%s] Grab Failed %s, repeat after %s :%s", g.Name, g.Url, g.TimeOutIntervalInfo, e)
					g.Log.AddLog(g.ErrorNotes, "ERROR")
					g.NextGrabExe = time.Now().Add(g.TimeOutInterval)
					g.LastGrabStat = false
					g.ErrorFound += 1
				} else {
					g.Log.AddLog(fmt.Sprintf("[%s] Grab Success %s", g.Name, g.Url), "INFO")
				}
				for key := range g.ServGrabber.Config.DataSettings {
					keySetting = append(keySetting, key)
				}
				// keySetting = g.sGrabber.Config.DataSettings
			case SourceType_DocExcel:
				// e = g.sGetData.ResultFromDatabase(key, &docs)
				// if e != nil {
				// 	g.LastGrabStat = false
				// }
				for key := range g.ServGetData.CollectionSettings {
					keySetting = append(keySetting, key)
				}
			}

			// if e := g.ServGrabber.Grab(nil); e != nil {
			// 	g.ErrorNotes = fmt.Sprintf("[%s] Grab Failed %s, repeat after %s :%s", g.Name, g.Url, g.TimeOutIntervalInfo, e)
			// 	g.Log.AddLog(g.ErrorNotes, "ERROR")
			// 	g.NextGrabExe = time.Now().Add(g.TimeOutInterval)
			// 	g.LastGrabStat = false
			// 	g.ErrorFound += 1
			// 	continue
			// } else {
			// 	g.Log.AddLog(fmt.Sprintf("[%s] Grab Success %s", g.Name, g.Url), "INFO")
			// }

			if g.LastGrabStat {
				for _, key := range keySetting {
					var e error
					g.Log.AddLog(fmt.Sprintf("[%s-%s] Fetch Data to destination started", g.Name, key), "INFO")

					docs := []toolkit.M{}
					switch g.SourceType {
					case SourceType_HttpHtml, SourceType_HttpJson:
						e = g.ServGrabber.ResultFromHtml(key, &docs)
					case SourceType_DocExcel:
						e = g.ServGetData.ResultFromDatabase(key, &docs)
						if e != nil {
							g.LastGrabStat = false
						}
					}
					if e != nil || !(g.LastGrabStat) {
						g.ErrorNotes = fmt.Sprintf("[%s-%s] Fetch Result Failed : ", g.Name, key, e)
						g.Log.AddLog(g.ErrorNotes, "ERROR")
					}

					// cannot insert data into the output file, not sure why
					// workaround: if the file is empty, it needs to be deleted first
					if g.DestDbox[key].Desttype == "csv" {
						outputCSVpath := g.DestDbox[key].IConnection.Info().Host
						if toolkit.IsFileExist(outputCSVpath) {
							outputCSVcontentBytes, e := ioutil.ReadFile(outputCSVpath)
							if e != nil {
								g.ErrorNotes = fmt.Sprintf("[%s-%s] Read output file failed [%s-%s]:%s", g.Name, key, g.DestDbox[key].Desttype, g.DestDbox[key].IConnection.Info().Host, e)
								g.Log.AddLog(g.ErrorNotes, "ERROR")
							}

							if strings.TrimSpace(string(outputCSVcontentBytes)) == "" {
								os.Remove(outputCSVpath)
							}
						}
					}

					e = g.DestDbox[key].IConnection.Connect()
					if e != nil {
						g.ErrorNotes = fmt.Sprintf("[%s-%s] Connect to destination failed [%s-%s]:%s", g.Name, key, g.DestDbox[key].Desttype, g.DestDbox[key].IConnection.Info().Host, e)
						g.Log.AddLog(g.ErrorNotes, "ERROR")
					}

					var q dbox.IQuery
					if g.DestDbox[key].Collection == "" {
						q = g.DestDbox[key].IConnection.NewQuery().SetConfig("multiexec", true).Save()
					} else {
						q = g.DestDbox[key].IConnection.NewQuery().SetConfig("multiexec", true).From(g.DestDbox[key].Collection).Save()
					}

					if g.DestDbox[key].Desttype == "csv" {
						q = q.Insert()
					}

					xN := 0
					iN := 0
					for _, doc := range docs {
						for field, val := range doc {
							doc[field] = strings.TrimSpace(fmt.Sprintf("%v", val))
						}

						if g.DestDbox[key].Desttype == "mongo" {
							doc["_id"] = toolkit.GenerateRandomString("1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnpqrstuvwxyz", 32)
						}

						e = q.Exec(toolkit.M{
							"data": doc,
						})

						if g.DestDbox[key].Desttype == "mongo" {
							delete(doc, "_id")
						}

						if e != nil {
							g.ErrorNotes = fmt.Sprintf("[%s-%s] Unable to insert [%s-%s]:%s", g.Name, key, g.DestDbox[key].Desttype, g.DestDbox[key].IConnection.Info().Host, e)
							g.Log.AddLog(g.ErrorNotes, "ERROR")
							g.ErrorFound += 1
						} else {
							iN += 1
						}
						xN++
					}
					g.RowGrabbed += xN
					q.Close()
					g.DestDbox[key].IConnection.Close()

					g.Log.AddLog(fmt.Sprintf("[%s-%s] Fetch Data to destination finished with %d records fetched", g.Name, key, xN), "INFO")

					if g.HistoryPath != "" && g.HistoryRecPath != "" {
						recfile := g.AddRecHistory(key, docs)
						historyservice := toolkit.M{}.Set("datasettingname", key).Set("grabdate", g.LastGrabExe).Set("rowgrabbed", g.RowGrabbed).
							Set("rowsaved", iN).Set("note", g.ErrorNotes).Set("grabstatus", "SUCCESS").Set("recfile", recfile)
						if !(g.LastGrabStat) {
							historyservice.Set("grabstatus", "FAILED")
						}
						g.AddHistory(historyservice)
					}
				}
			} else {
				if g.HistoryPath != "" {
					historyservice := toolkit.M{}.Set("datasettingname", "-").Set("grabdate", g.LastGrabExe).Set("rowgrabbed", g.RowGrabbed).
						Set("rowsaved", 0).Set("note", g.ErrorNotes).Set("grabstatus", "FAILED").Set("recfile", "")
					g.AddHistory(historyservice)
				}
			}
		}
	}(g)
}
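A hedged control sketch for the loop above (not from the source): execService keeps polling only while ServiceRunningStat is true, so a caller can toggle that flag to start and stop it. The startPolling/stopPolling method names are assumptions; only ServiceRunningStat and execService come from the code above.
// Hypothetical wrappers around the polling goroutine spawned by execService.
func (g *GrabService) startPolling() {
	g.ServiceRunningStat = true
	g.execService() // launches the background loop and returns immediately
}

func (g *GrabService) stopPolling() {
	// the loop re-checks ServiceRunningStat after each wait and then exits
	g.ServiceRunningStat = false
}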