func (d *DataBrowserController) parseQuery(conn dbox.IConnection, dbrowser colonycore.DataBrowser, datacon *colonycore.Connection) (dbox.IQuery, error) { var dataQuery dbox.IQuery if dbrowser.QueryType == "nonQueryText" { dataQuery = conn.NewQuery().From(dbrowser.TableNames) } else if dbrowser.QueryType == "SQL" { if toolkit.HasMember(rdbms, datacon.Driver) { dataQuery = conn.NewQuery().Command("freequery", toolkit.M{}. Set("syntax", dbrowser.QueryText)) } else { return nil, errors.New("Free Text Query with SQL only for RDBMS, please use Dbox") } } else if dbrowser.QueryType == "Dbox" { queryInfo := toolkit.M{} toolkit.UnjsonFromString(dbrowser.QueryText, &queryInfo) toolkit.Println("queryinfo", queryInfo) if qFrom := queryInfo.Get("from", "").(string); qFrom != "" { dataQuery = conn.NewQuery() dataQuery = dataQuery.From(qFrom) } if qSelect := queryInfo.Get("select", "").(string); qSelect != "" { if qSelect != "*" { dataQuery = dataQuery.Select(strings.Split(qSelect, ",")...) } } } return dataQuery, nil }
func (d *DataSourceController) parseQuery(query dbox.IQuery, queryInfo toolkit.M) (dbox.IQuery, MetaSave) { metaSave := MetaSave{} if qFrom := queryInfo.Get("from", "").(string); qFrom != "" { query = query.From(qFrom) } if qSelect := queryInfo.Get("select", "").(string); qSelect != "" { if qSelect != "*" { query = query.Select(strings.Split(qSelect, ",")...) } } if qTakeRaw, qTakeOK := queryInfo["take"]; qTakeOK { if qTake, ok := qTakeRaw.(float64); ok { query = query.Take(int(qTake)) } if qTake, ok := qTakeRaw.(int); ok { query = query.Take(qTake) } } if qSkipRaw, qSkipOK := queryInfo["skip"]; qSkipOK { if qSkip, ok := qSkipRaw.(float64); ok { query = query.Take(int(qSkip)) } if qSkip, ok := qSkipRaw.(int); ok { query = query.Take(qSkip) } } if qOrder := queryInfo.Get("order", "").(string); qOrder != "" { orderAll := map[string]string{} err := json.Unmarshal([]byte(qOrder), &orderAll) if err == nil { orderString := []string{} for key, val := range orderAll { orderString = append(orderString, key) orderString = append(orderString, val) } query = query.Order(orderString...) 
} } if qInsert := queryInfo.Get("insert", "").(string); qInsert != "" { if qInsert != "" { metaSave.keyword = "insert" metaSave.data = qInsert query = query.Insert() } } if qUpdate := queryInfo.Get("update", "").(string); qUpdate != "" { if qUpdate != "" { metaSave.keyword = "update" metaSave.data = qUpdate query = query.Update() } } if _, qDeleteOK := queryInfo["delete"]; qDeleteOK { metaSave.keyword = "delete" query = query.Delete() } if qCommand := queryInfo.Get("command", "").(string); qCommand != "" { command := map[string]interface{}{} err := json.Unmarshal([]byte(qCommand), &command) if err == nil { for key, value := range command { query = query.Command(key, value) break } } } if qWhere := queryInfo.Get("where", "").(string); qWhere != "" { whereAll := []map[string]interface{}{} err := json.Unmarshal([]byte(qWhere), &whereAll) if err == nil { allFilter := []*dbox.Filter{} for _, each := range whereAll { where, _ := toolkit.ToM(each) filter := d.filterParse(where) if filter != nil { allFilter = append(allFilter, filter) } } query = query.Where(allFilter...) } } return query, metaSave }
func (g *GrabService) execService() { g.LastGrabStat = false go func(g *GrabService) { for g.ServiceRunningStat { if g.LastGrabStat { <-time.After(g.GrabInterval) } else { <-time.After(g.TimeOutInterval) } if !g.ServiceRunningStat { continue } g.ErrorNotes = "" g.LastGrabExe = time.Now() g.NextGrabExe = time.Now().Add(g.GrabInterval) g.LastGrabStat = true g.Log.AddLog(fmt.Sprintf("[%s] Grab Started %s", g.Name, g.Url), "INFO") g.GrabCount += 1 keySetting := []string{} switch g.SourceType { case SourceType_HttpHtml, SourceType_HttpJson: if e := g.ServGrabber.Grab(nil); e != nil { g.ErrorNotes = fmt.Sprintf("[%s] Grab Failed %s, repeat after %s :%s", g.Name, g.Url, g.TimeOutIntervalInfo, e) g.Log.AddLog(g.ErrorNotes, "ERROR") g.NextGrabExe = time.Now().Add(g.TimeOutInterval) g.LastGrabStat = false g.ErrorFound += 1 } else { g.Log.AddLog(fmt.Sprintf("[%s] Grab Success %s", g.Name, g.Url), "INFO") } for key, _ := range g.ServGrabber.Config.DataSettings { keySetting = append(keySetting, key) } // keySetting = g.sGrabber.Config.DataSettings case SourceType_DocExcel: // e = g.sGetData.ResultFromDatabase(key, &docs) // if e != nil { // g.LastGrabStat = false // } for key, _ := range g.ServGetData.CollectionSettings { keySetting = append(keySetting, key) } } // if e := g.ServGrabber.Grab(nil); e != nil { // g.ErrorNotes = fmt.Sprintf("[%s] Grab Failed %s, repeat after %s :%s", g.Name, g.Url, g.TimeOutIntervalInfo, e) // g.Log.AddLog(g.ErrorNotes, "ERROR") // g.NextGrabExe = time.Now().Add(g.TimeOutInterval) // g.LastGrabStat = false // g.ErrorFound += 1 // continue // } else { // g.Log.AddLog(fmt.Sprintf("[%s] Grab Success %s", g.Name, g.Url), "INFO") // } if g.LastGrabStat { for _, key := range keySetting { var e error g.Log.AddLog(fmt.Sprintf("[%s-%s] Fetch Data to destination started", g.Name, key), "INFO") docs := []toolkit.M{} switch g.SourceType { case SourceType_HttpHtml, SourceType_HttpJson: e = g.ServGrabber.ResultFromHtml(key, &docs) case SourceType_DocExcel: e = 
g.ServGetData.ResultFromDatabase(key, &docs) if e != nil { g.LastGrabStat = false } } if e != nil || !(g.LastGrabStat) { g.ErrorNotes = fmt.Sprintf("[%s-%s] Fetch Result Failed : ", g.Name, key, e) g.Log.AddLog(g.ErrorNotes, "ERROR") } e = g.DestDbox[key].IConnection.Connect() if e != nil { g.ErrorNotes = fmt.Sprintf("[%s-%s] Connect to destination failed [%s-%s]:%s", g.Name, key, g.DestDbox[key].Desttype, g.DestDbox[key].IConnection.Info().Host, e) g.Log.AddLog(g.ErrorNotes, "ERROR") } var q dbox.IQuery if g.DestDbox[key].Collection == "" { q = g.DestDbox[key].IConnection.NewQuery().SetConfig("multiexec", true).Save() } else { q = g.DestDbox[key].IConnection.NewQuery().SetConfig("multiexec", true).From(g.DestDbox[key].Collection).Save() } xN := 0 iN := 0 for _, doc := range docs { for key, val := range doc { doc[key] = strings.TrimSpace(fmt.Sprintf("%s", val)) } if g.DestDbox[key].Desttype == "mongo" { doc["_id"] = toolkit.GenerateRandomString("1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnpqrstuvwxyz", 32) } e = q.Exec(toolkit.M{ "data": doc, }) if g.DestDbox[key].Desttype == "mongo" { delete(doc, "_id") } if e != nil { g.ErrorNotes = fmt.Sprintf("[%s-%s] Unable to insert [%s-%s]:%s", g.Name, key, g.DestDbox[key].Desttype, g.DestDbox[key].IConnection.Info().Host, e) g.Log.AddLog(g.ErrorNotes, "ERROR") g.ErrorFound += 1 } else { iN += 1 } xN++ } g.RowGrabbed += xN q.Close() g.DestDbox[key].IConnection.Close() g.Log.AddLog(fmt.Sprintf("[%s-%s] Fetch Data to destination finished with %d record fetch", g.Name, key, xN), "INFO") if g.HistoryPath != "" && g.HistoryRecPath != "" { recfile := g.AddRecHistory(key, docs) historyservice := toolkit.M{}.Set("datasettingname", key).Set("grabdate", g.LastGrabExe).Set("rowgrabbed", g.RowGrabbed). 
Set("rowsaved", iN).Set("note", g.ErrorNotes).Set("grabstatus", "SUCCESS").Set("recfile", recfile) if !(g.LastGrabStat) { historyservice.Set("grabstatus", "FAILED") } g.AddHistory(historyservice) } } } else { if g.HistoryPath != "" { historyservice := toolkit.M{}.Set("datasettingname", "-").Set("grabdate", g.LastGrabExe).Set("rowgrabbed", g.RowGrabbed). Set("rowsaved", 0).Set("note", g.ErrorNotes).Set("grabstatus", "FAILED").Set("recfile", "") g.AddHistory(historyservice) } } } }(g) }
// hasAggr resolves the aggregate definitions attached to data.MetaData.
// Each metadata entry whose Aggregate field is a non-empty JSON object
// (keys among SUM/AVG/MAX/MIN/COUNT) is evaluated and its Aggregate field
// is overwritten with the JSON-encoded results.
//
// Evaluation strategy depends on data.QueryType:
//   - "" (plain table): delegated per-field to dboxAggr against TableNames.
//   - "Dbox": delegated to dboxAggr using the "from" value parsed out of
//     data.QueryText.
//   - "SQL": aggregate expressions (e.g. "SUM(field)") are collected for all
//     fields first, then a single "select <aggrs> FROM <table>" free query is
//     run; the FROM table is extracted from data.QueryText by regex. Result
//     columns are matched back to metadata entries by substring checks on
//     field name and aggregate name.
//
// Returns the mutated *data, or the first query/cursor error encountered.
func (d *DataBrowserController) hasAggr(ctx dbox.IConnection, data *colonycore.DataBrowser, conn *colonycore.Connection) (*colonycore.DataBrowser, error) {
	var fieldArr, aggrArr []string
	var indexAggr []map[int]string
	var query dbox.IQuery
	// fieldAggr maps field name -> aggregate-name bookkeeping used in the
	// SQL result-matching phase below.
	fieldAggr := toolkit.M{}
	for i, v := range data.MetaData {
		if v.Aggregate != "" {
			// Aggregate holds JSON like {"SUM":...,"AVG":...}; parse to find
			// which aggregate functions are requested for this field.
			result := toolkit.M{}
			toolkit.UnjsonFromString(v.Aggregate, &result)
			cursor := []toolkit.M{}
			if data.QueryType == "" {
				aggregate, e := d.dboxAggr(data.TableNames, v.Field, ctx, query, result, fieldAggr, cursor, conn)
				if e != nil {
					return nil, e
				}
				v.Aggregate = toolkit.JsonString(aggregate)
			} else if data.QueryType == "SQL" {
				// For SQL, just record the expressions; execution happens in
				// one combined query after this loop. When a field requests
				// more than one aggregate, the (index -> name) pair goes into
				// the shared indexAggr slice; otherwise into the local names
				// map.
				names := map[int]string{}
				fieldArr = append(fieldArr, v.Field)
				if _, sumOK := result["SUM"]; sumOK {
					aggrArr = append(aggrArr, "SUM("+v.Field+")")
					if len(result) > 1 {
						indexAggr = append(indexAggr, map[int]string{i: "sum"})
					} else {
						names[i] = "sum"
					}
				}
				if _, avgOK := result["AVG"]; avgOK {
					aggrArr = append(aggrArr, "AVG("+v.Field+")")
					if len(result) > 1 {
						indexAggr = append(indexAggr, map[int]string{i: "avg"})
					} else {
						names[i] = "avg"
					}
				}
				if _, maxOK := result["MAX"]; maxOK {
					aggrArr = append(aggrArr, "MAX("+v.Field+")")
					if len(result) > 1 {
						indexAggr = append(indexAggr, map[int]string{i: "max"})
					} else {
						names[i] = "max"
					}
				}
				if _, minOK := result["MIN"]; minOK {
					aggrArr = append(aggrArr, "MIN("+v.Field+")")
					if len(result) > 1 {
						indexAggr = append(indexAggr, map[int]string{i: "min"})
					} else {
						names[i] = "min"
					}
				}
				// NOTE(review): variable name minOK is reused here for the
				// COUNT check (separate scope, compiles fine, but misleading).
				if _, minOK := result["COUNT"]; minOK {
					aggrArr = append(aggrArr, "COUNT("+v.Field+")")
					if len(result) > 1 {
						indexAggr = append(indexAggr, map[int]string{i: "count"})
					} else {
						names[i] = "count"
					}
				}
				if len(result) > 1 {
					fieldAggr.Set(v.Field, indexAggr)
				} else {
					fieldAggr.Set(v.Field, names)
				}
			} else if data.QueryType == "Dbox" {
				getQuery := toolkit.M{}
				toolkit.UnjsonFromString(data.QueryText, &getQuery)
				aggregate, e := d.dboxAggr(getQuery.Get("from").(string), v.Field, ctx, query, result, fieldAggr, cursor, conn)
				if e != nil {
					return nil, e
				}
				v.Aggregate = toolkit.JsonString(aggregate)
			}
		}
	}
	if data.QueryType == "SQL" {
		// fieldString := strings.Join(fieldArr, ", ")
		aggrString := strings.Join(aggrArr, ", ")
		var queryText string
		// Extract the FROM clause (case-insensitive) out of the user's SQL
		// so the aggregates run against the same table(s).
		// NOTE(review): compiled per call; could be a package-level var.
		r := regexp.MustCompile(`(([Ff][Rr][Oo][Mm])) (?P<from>([a-zA-Z][_a-zA-Z]+[_a-zA-Z0-1].*))`)
		temparray := r.FindStringSubmatch(data.QueryText)
		sqlpart := toolkit.M{}
		// NOTE(review): if the regex does not match, temparray is empty and
		// temparray[i] below would panic — assumes QueryText has a FROM.
		for i, val := range r.SubexpNames() {
			if val != "" {
				sqlpart.Set(val, temparray[i])
			}
		}
		if fromOK := sqlpart.Get("from", "").(string); fromOK != "" {
			queryText = toolkit.Sprintf("select %s FROM %s", aggrString, sqlpart.Get("from", "").(string))
			// toolkit.Printf("queryString:%v\n", queryString)
		}
		query = ctx.NewQuery().Command("freequery", toolkit.M{}.
			Set("syntax", queryText))
		csr, e := query.Cursor(nil)
		if e != nil {
			return nil, e
		}
		defer csr.Close()
		cursor := []toolkit.M{}
		e = csr.Fetch(&cursor, 0, false)
		if e != nil {
			return nil, e
		}
		// Match each result column back to the metadata entry it belongs
		// to: a column matches when it contains the field name, the
		// metadata field name, and the aggregate name as substrings.
		for f, m := range fieldAggr {
			aggrData := toolkit.M{}
			for _, aggs := range cursor {
				for k, agg := range aggs {
					if toolkit.SliceLen(m) > 0 {
						// Multi-aggregate case: m is []map[int]string.
						for _, vals := range m.([]map[int]string) {
							for key, val := range vals {
								if strings.Contains(k, f) && strings.Contains(k, data.MetaData[key].Field) && strings.Contains(k, val) {
									aggrData.Set(val, agg)
									data.MetaData[key].Aggregate = toolkit.JsonString(aggrData)
								}
							}
						}
					} else {
						// Single-aggregate case: m is map[int]string.
						for key, val := range m.(map[int]string) {
							if strings.Contains(k, f) && strings.Contains(k, data.MetaData[key].Field) && strings.Contains(k, val) {
								aggrData.Set(val, agg)
								data.MetaData[key].Aggregate = toolkit.JsonString(aggrData)
								// toolkit.Printf("k:%v f:%v key:%v val:%v agg:%v\n", k, f, key, val, data.MetaData[key].Aggregate)
							}
						}
					}
				}
			}
		}
	}
	return data, nil
}
// dboxAggr runs the aggregates requested in result (keys among
// SUM/AVG/MAX/MIN/COUNT) over field of table tblename, using dbox's Aggr
// API. For mongo the field is prefixed with "$" and non-COUNT aggregates
// are grouped; COUNT is computed via cursor.Count() instead of an Aggr.
// Results accumulate into the returned toolkit.M (e.g. {"SUM":...,
// "count":...}) and are also mirrored into fieldAggr keyed by field.
//
// NOTE(review): the incoming query parameter is immediately overwritten and
// the cursor slice accumulates rows across loop iterations — both look like
// carry-overs from an earlier design; confirm before refactoring.
func (d *DataBrowserController) dboxAggr(tblename string, field string, ctx dbox.IConnection, query dbox.IQuery, result, fieldAggr toolkit.M, cursor []toolkit.M, conn *colonycore.Connection) (toolkit.M, error) {
	aggregate := toolkit.M{}
	// Mongo aggregation pipelines reference fields as "$name".
	if conn.Driver == "mongo" {
		field = "$" + field
	}
	query = ctx.NewQuery().From(tblename)
	if result != nil {
		// One iteration per requested aggregate. Note the Aggr calls chain
		// onto the same query object, so later iterations keep the earlier
		// aggregates — presumably intentional so all results land in one
		// row; TODO confirm.
		for k, _ := range result {
			if k == "SUM" {
				query = query.Aggr(dbox.AggrSum, field, "SUM")
			}
			if k == "AVG" {
				query = query.Aggr(dbox.AggrAvr, field, "AVG")
			}
			if k == "MAX" {
				query = query.Aggr(dbox.AggrMax, field, "MAX")
			}
			if k == "MIN" {
				query = query.Aggr(dbox.AggrMin, field, "MIN")
			}
			if conn.Driver == "mongo" {
				if k != "COUNT" {
					query = query.Group()
				}
			}
			csr, e := query.Cursor(nil)
			if e != nil {
				return nil, e
			}
			// NOTE(review): defer inside a loop — every cursor stays open
			// until the function returns.
			defer csr.Close()
			hasCount := []toolkit.M{}
			if k == "COUNT" {
				// COUNT uses a second cursor and the driver's Count()
				// rather than fetching aggregate rows.
				csr, e := query.Cursor(nil)
				if e != nil {
					return nil, e
				}
				defer csr.Close()
				count := csr.Count()
				hasCount = append(hasCount, aggregate.Set("count", count))
				aggregate.Set("count", count)
			} else {
				e = csr.Fetch(&cursor, 0, false)
				if e != nil {
					return nil, e
				}
			}
			// Once a count has been computed, splice it into the row set so
			// the loop below folds it into aggregate too.
			if _, countOK := aggregate["count"]; countOK {
				cursor = append(cursor, hasCount...)
			}
			for _, agg := range cursor {
				// The last fetched row wins as the aggregate result.
				aggregate = agg
				if conn.Driver == "mongo" {
					// Drop mongo's synthetic group key from the result.
					for f, _ := range aggregate {
						if f == "_id" {
							aggregate.Unset(f)
						}
					}
				}
				fieldAggr.Set(field, aggregate)
				// toolkit.Printf("k:%v fieldArr:%v cursor:%v\n", k, field, fieldAggr)
			}
		}
	}
	return aggregate, nil
}
func streamsavedata(intms <-chan toolkit.M, sQ dbox.IQuery, key string, dt toolkit.M) { var err error iN, note := 0, "" for intm := range intms { if destDboxs[key].desttype == "mongo" { intm.Set("_id", toolkit.GenerateRandomString("", 32)) } if len(intm) == 0 { continue } //Pre Execute Program if extCommand.Has("pre") && toolkit.ToString(extCommand["pre"]) != "" { sintm := toolkit.JsonString(intm) arrcmd := make([]string, 0, 0) // if runtime.GOOS == "windows" { // arrcmd = append(arrcmd, "cmd") // arrcmd = append(arrcmd, "/C") // } arrcmd = append(arrcmd, toolkit.ToString(extCommand["pre"])) arrcmd = append(arrcmd, sintm) // output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1:]) output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1]) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to execute pre external command :%s", key, err.Error()), "ERROR") note = "Error Found" continue } err = toolkit.UnjsonFromString(output, &intm) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to get pre external command output :%s", key, err.Error()), "ERROR") note = "Error Found" continue } } err = sQ.Exec(toolkit.M{ "data": intm, }) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to insert data [%s-%s]:%s", key, "csv", destDboxs[key].IConnection.Info().Host, err.Error()), "ERROR") note = "Error Found" continue } err = saverechistory(key, intm) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to insert record data [%s-%s]:%s", key, "csv", destDboxs[key].IConnection.Info().Host, err.Error()), "ERROR") note = "Error Found" } iN += 1 if math.Mod(float64(iN), 100) == 0 { _ = updatesnapshot(iN, key) dt = dt.Set("rowsaved", (toolkit.ToInt(dt.Get("rowsaved", 0), toolkit.RoundingAuto) + iN)) iN = 0 } //Post Execute Program if extCommand.Has("post") { sintm := toolkit.JsonString(intm) arrcmd := make([]string, 0, 0) // if runtime.GOOS == "windows" { // arrcmd = append(arrcmd, "cmd") // arrcmd = append(arrcmd, "/C") // } arrcmd = 
append(arrcmd, toolkit.ToString(extCommand["post"])) arrcmd = append(arrcmd, sintm) // output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1:]) output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1]) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to execute post external command :%s", key, err.Error()), "ERROR") note = "Error Found" continue } err = toolkit.UnjsonFromString(output, &intm) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to get post external command output :%s", key, err.Error()), "ERROR") note = "Error Found" continue } } } dt = dt.Set("note", note). Set("grabstatus", "done"). Set("rowsaved", (toolkit.ToInt(dt.Get("rowsaved", 0), toolkit.RoundingAuto) + iN)) _ = updatesnapshot(iN, key) err = savehistory(dt) if err != nil { Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to save history : %s", key), "ERROR") } Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Finish save data", key), "INFO") destDboxs[key].IConnection.Close() }