// HasPartExec merges the in-memory slice data with the records already stored
// in the backing JSON file: rows matching the pending where filters are
// replaced, unmatched rows are appended, and the result is written back.
func (q *Query) HasPartExec() error {
	var e error
	var lastJson []toolkit.M
	q.ReadFile(&lastJson, q.Connection().(*Connection).filePath)

	if toolkit.SliceLen(lastJson) > 0 {
		getWhere := []*dbox.Filter{}
		for _, v := range q.whereData {
			getWhere = []*dbox.Filter{v}
			i := dbox.Find(q.sliceData, getWhere)
			for idSlice := range q.sliceData {
				if toolkit.HasMember(i, idSlice) {
					idata := dbox.Find(lastJson, getWhere)
					for idx := range lastJson {
						if toolkit.HasMember(idata, idx) {
							lastJson[idx] = q.sliceData[idSlice]
						}
					}
					if toolkit.SliceLen(idata) == 0 {
						lastJson = append(lastJson, q.sliceData[idSlice])
					}
				}
			}
		}
		q.sliceData = lastJson
	}

	e = q.WriteFile(q.sliceData)
	if e != nil {
		return errorlib.Error(packageName, modQuery+".Exec", "HasPartExec", e.Error())
	}
	return nil
}
func MatchV(v interface{}, f *Filter) bool {
	match := false
	if toolkit.HasMember([]interface{}{FilterOpEqual, FilterOpNoEqual, FilterOpGt, FilterOpGte, FilterOpLt, FilterOpLte}, f.Op) {
		return Compare(v, f.Value, f.Op)
	} else if f.Op == FilterOpIn {
		var values []interface{}
		toolkit.FromBytes(toolkit.ToBytes(f.Value, ""), "", &values)
		return toolkit.HasMember(values, v)
	} else if f.Op == FilterOpNin {
		var values []interface{}
		toolkit.FromBytes(toolkit.ToBytes(f.Value, ""), "", &values)
		return !toolkit.HasMember(values, v)
	}
	return match
}
// DoFetchDataSourceMetaData connects using the given connection, fetches a
// single record from the requested table/object, and derives its field
// metadata from that sample record.
func (d *DataSourceController) DoFetchDataSourceMetaData(dataConn *colonycore.Connection, from string) (bool, []*colonycore.FieldInfo, error) {
	if err := d.checkIfDriverIsSupported(dataConn.Driver); err != nil {
		return false, nil, err
	}

	var conn dbox.IConnection
	conn, err := helper.ConnectUsingDataConn(dataConn).Connect()
	if err != nil {
		return false, nil, err
	}
	defer conn.Close()

	var query = conn.NewQuery().Take(1)
	if !toolkit.HasMember([]string{"csv", "json"}, dataConn.Driver) {
		query = query.From(from)
	}
	cursor, err := query.Cursor(nil)
	if err != nil {
		return false, nil, err
	}
	defer cursor.Close()

	data := toolkit.M{}
	var connDriver string
	if dataConn.Driver == "jdbc" {
		connDriver = strings.Split(dataConn.Settings.GetString("connector"), ":")[1]
	} else {
		connDriver = dataConn.Driver
	}

	if !toolkit.HasMember([]string{"json", "mysql"}, connDriver) {
		err = cursor.Fetch(&data, 1, false)
	} else {
		dataAll := []toolkit.M{}
		err = cursor.Fetch(&dataAll, 1, false)
		if err != nil {
			return false, []*colonycore.FieldInfo{}, errors.New("No data found")
		}
		if len(dataAll) > 0 {
			data = dataAll[0]
		}
	}
	if err != nil {
		return false, nil, err
	}

	metadata := d.parseMetadata(data)
	return true, metadata, nil
}
// HasPartExec reconciles pending save data with the backing JSON file. When
// the file already has records, rows matching the pending where filters are
// replaced and new rows appended; otherwise pending rows that match the same
// filter are collapsed into a single record before writing.
func (q *Query) HasPartExec() error {
	var e error
	var lastJson []toolkit.M
	q.ReadFile(&lastJson, q.Connection().(*Connection).filePath)

	if toolkit.SliceLen(lastJson) > 0 {
		getWhere := []*dbox.Filter{}
		for _, v := range q.whereData {
			getWhere = []*dbox.Filter{v}
			i := dbox.Find(q.sliceData, getWhere)
			for idSlice := range q.sliceData {
				if toolkit.HasMember(i, idSlice) {
					idata := dbox.Find(lastJson, getWhere)
					for idx := range lastJson {
						if toolkit.HasMember(idata, idx) {
							lastJson[idx] = q.sliceData[idSlice]
						}
					}
					if toolkit.SliceLen(idata) == 0 {
						lastJson = append(lastJson, q.sliceData[idSlice])
					}
				}
			}
		}
		q.sliceData = lastJson
	} else {
		idx := []int{}
		for _, v := range q.whereData {
			getWhere := []*dbox.Filter{v}
			idx = dbox.Find(q.sliceData, getWhere)
		}
		if toolkit.SliceLen(idx) > 1 {
			newdata := toolkit.M{}
			for idslice, dataslice := range q.sliceData {
				if toolkit.HasMember(idx, idslice) {
					idf, _ := toolkit.IdInfo(dataslice)
					newdata = q.sliceData[idslice]
					toolkit.CopyM(&dataslice, &newdata, false, []string{idf})
				}
			}
			q.sliceData = []toolkit.M{}
			q.sliceData = append(q.sliceData, newdata)
		}
	}

	e = q.WriteFile(q.sliceData)
	if e != nil {
		return errorlib.Error(packageName, modQuery+".Exec", "HasPartExec", e.Error())
	}
	return nil
}
// MatchV reports whether value v satisfies the single filter f, covering the
// comparison operators, IN/NIN, contains, and starts/ends-with.
func MatchV(v interface{}, f *Filter) bool {
	match := false
	if toolkit.HasMember([]string{FilterOpEqual, FilterOpNoEqual, FilterOpGt, FilterOpGte, FilterOpLt, FilterOpLte}, f.Op) {
		return toolkit.Compare(v, f.Value, f.Op)
	} else if f.Op == FilterOpIn {
		var values []interface{}
		toolkit.FromBytes(toolkit.ToBytes(f.Value, ""), "", &values)
		return toolkit.HasMember(values, v)
	} else if f.Op == FilterOpNin {
		var values []interface{}
		toolkit.FromBytes(toolkit.ToBytes(f.Value, ""), "", &values)
		return !toolkit.HasMember(values, v)
	} else if f.Op == FilterOpContains {
		var values []interface{}
		var b bool
		toolkit.FromBytes(toolkit.ToBytes(f.Value, ""), "", &values)
		for _, val := range values {
			// case-insensitive substring match against any of the values
			r := regexp.MustCompile(`(?i)` + val.(string))
			b = r.Match([]byte(v.(string)))
			if b {
				return true
			}
		}
	} else if f.Op == FilterOpStartWith || f.Op == FilterOpEndWith {
		value := ""
		if f.Op == FilterOpStartWith {
			value = toolkit.Sprintf("^%s.*$", f.Value)
		} else {
			value = toolkit.Sprintf("^.*%s$", f.Value)
		}
		cond, _ := regexp.Match(value, []byte(v.(string)))
		return cond
	}
	return match
}
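// A minimal usage sketch for MatchV. It assumes only what the function above
// already relies on: the Filter struct's Op/Value fields and the FilterOp*
// constants defined in this package. The literal values are illustrative.
func ExampleMatchV() {
	in := &Filter{Op: FilterOpIn, Value: []interface{}{"open", "pending"}}
	gt := &Filter{Op: FilterOpGt, Value: 10}

	toolkit.Println(MatchV("open", in)) // true: "open" is in the IN list
	toolkit.Println(MatchV(5, gt))      // false: 5 is not greater than 10
}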
// extractData walks a record and renders three SQL fragments from its scalar
// fields: a parenthesised column list, a "col = value" list for UPDATE, and a
// parenthesised value list for INSERT. Map, struct, slice, and invalid values
// are skipped.
func extractData(data toolkit.M, driverName string) (string, string, string) {
	var attributes, setUpdate, values string
	if data != nil {
		var i int
		for field, val := range data {
			var datatypelist = []string{"map", "invalid", "struct", "slice"}
			var value reflect.Value
			if val != nil {
				value = reflect.Zero(reflect.TypeOf(val))
			}
			if toolkit.HasMember(datatypelist, value.Kind().String()) {
				continue
			}
			stringValues := StringValue(val, driverName)
			if i == 0 {
				attributes = "(" + field
				setUpdate = field + " = " + stringValues
				values = "(" + stringValues
			} else {
				attributes += ", " + field
				setUpdate += ", " + field + " = " + stringValues
				values += ", " + stringValues
			}
			i += 1
		}
		attributes += ")"
		values += ")"
	}
	return attributes, setUpdate, values
}
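// How a caller might assemble SQL from extractData's three fragments. This is
// a sketch only: the table name and record are invented, the rendered values
// depend on StringValue, and Go map iteration means column order may vary
// between runs (attributes and values always stay aligned with each other).
func exampleBuildRowStatements() {
	rec := toolkit.M{}.Set("id", 1).Set("name", "widget")
	attributes, setUpdate, values := extractData(rec, "mysql")

	insertStmt := "INSERT INTO products " + attributes + " VALUES " + values
	updateStmt := "UPDATE products SET " + setUpdate + " WHERE id = 1"
	toolkit.Println(insertStmt)
	toolkit.Println(updateStmt)
}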
// parseQuery builds a dbox query for the requested data browser: plain table
// access, a free-text SQL command (RDBMS drivers only), or a Dbox JSON query.
func (d *DataBrowserController) parseQuery(conn dbox.IConnection, dbrowser colonycore.DataBrowser, datacon *colonycore.Connection) (dbox.IQuery, error) {
	var dataQuery dbox.IQuery

	if dbrowser.QueryType == "nonQueryText" {
		dataQuery = conn.NewQuery().From(dbrowser.TableNames)
	} else if dbrowser.QueryType == "SQL" {
		if toolkit.HasMember(rdbms, datacon.Driver) {
			dataQuery = conn.NewQuery().Command("freequery", toolkit.M{}.
				Set("syntax", dbrowser.QueryText))
		} else {
			return nil, errors.New("Free Text Query with SQL only for RDBMS, please use Dbox")
		}
	} else if dbrowser.QueryType == "Dbox" {
		queryInfo := toolkit.M{}
		toolkit.UnjsonFromString(dbrowser.QueryText, &queryInfo)
		toolkit.Println("queryinfo", queryInfo)

		if qFrom := queryInfo.Get("from", "").(string); qFrom != "" {
			dataQuery = conn.NewQuery()
			dataQuery = dataQuery.From(qFrom)
		}
		if qSelect := queryInfo.Get("select", "").(string); qSelect != "" {
			if qSelect != "*" {
				dataQuery = dataQuery.Select(strings.Split(qSelect, ",")...)
			}
		}
	}

	return dataQuery, nil
}
// extractDataBulk renders one parenthesised VALUES group for a record, using
// the column order given in attrList so every row lines up with the same
// field list. Non-scalar values are skipped, mirroring extractData.
func extractDataBulk(attrList []string, data toolkit.M, driverName string) string {
	var values string
	if data != nil {
		for i, attr := range attrList {
			val := data.Get(attr)
			var datatypelist = []string{"map", "invalid", "struct", "slice"}
			var value reflect.Value
			if val != nil {
				value = reflect.Zero(reflect.TypeOf(val))
			}
			if toolkit.HasMember(datatypelist, value.Kind().String()) {
				continue
			}
			stringValues := StringValue(val, driverName)
			if i == 0 {
				values = "(" + stringValues
			} else {
				values += ", " + stringValues
			}
		}
		values += ")"
	}
	return values
}
func ConnectUsingDataConn(dataConn *colonycore.Connection) *queryWrapper {
	if toolkit.HasMember([]string{"json", "jsons", "csv", "csvs"}, dataConn.Driver) && strings.HasPrefix(dataConn.Host, "http") {
		return Query(dataConn.Driver, dataConn.FileLocation, "", "", "", dataConn.Settings)
	}
	return Query(dataConn.Driver, dataConn.Host, dataConn.Database, dataConn.UserName, dataConn.Password, dataConn.Settings)
}
// ConnectUsingDataConn wraps a connection config in a queryWrapper. For
// flat-file drivers whose host is an HTTP URL, the remote file is downloaded
// into EC_DATA_PATH first and the wrapper points at the local copy.
func ConnectUsingDataConn(dataConn *colonycore.Connection) *queryWrapper {
	if toolkit.HasMember([]string{"json", "jsons", "csv", "csvs"}, dataConn.Driver) && strings.HasPrefix(dataConn.Host, "http") {
		if toolkit.IsFileExist(dataConn.FileLocation) || dataConn.FileLocation == "" {
			fileTempID := RandomIDWithPrefix("f")
			fileType := GetFileExtension(dataConn.Host)
			dataConn.FileLocation = fmt.Sprintf("%s.%s", filepath.Join(os.Getenv("EC_DATA_PATH"), "datasource", "upload", fileTempID), fileType)

			file, err := os.Create(dataConn.FileLocation)
			if err != nil {
				os.Remove(dataConn.FileLocation)
			} else {
				defer file.Close()
			}

			resp, err := http.Get(dataConn.Host)
			if err != nil {
				os.Remove(dataConn.FileLocation)
			} else {
				defer resp.Body.Close()
			}

			_, err = io.Copy(file, resp.Body)
			if err != nil {
				os.Remove(dataConn.FileLocation)
			}

			colonycore.Save(dataConn)
		}
		return Query(dataConn.Driver, dataConn.FileLocation, "", "", "", dataConn.Settings)
	}
	return Query(dataConn.Driver, dataConn.Host, dataConn.Database, dataConn.UserName, dataConn.Password, dataConn.Settings)
}
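// A sketch of driving ConnectUsingDataConn for a remote CSV source. The URL
// and settings are placeholders; the Connection fields and the
// Query(...).Connect() pattern follow the code above.
func exampleConnectRemoteCsv() error {
	dataConn := new(colonycore.Connection)
	dataConn.Driver = "csv"
	dataConn.Host = "http://example.com/data.csv"
	dataConn.Settings = toolkit.M{"useheader": true, "delimiter": ","}

	conn, err := ConnectUsingDataConn(dataConn).Connect()
	if err != nil {
		return err
	}
	defer conn.Close()

	// conn is a dbox.IConnection pointed at the downloaded local copy.
	return nil
}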
// execQueryPartDelete rewrites the CSV file, copying every row whose index is
// not in q.indexes (the rows selected for deletion) to the temporary writer.
func (q *Query) execQueryPartDelete() error {
	var e error
	e = q.startWriteMode()
	if e != nil {
		return err.Error(packageName, modQuery, "Exec-Delete: ", e.Error())
	}

	writer := q.writer
	reader := q.reader
	tempHeader := []string{}
	for _, val := range q.headerColumn {
		tempHeader = append(tempHeader, val.name)
	}

	i := 0
	for {
		i += 1
		dataTemp, e := reader.Read()
		if e == io.EOF {
			if !toolkit.HasMember(q.indexes, i) && len(dataTemp) > 0 {
				writer.Write(dataTemp)
				writer.Flush()
			}
			break
		} else if e != nil {
			_ = q.endWriteMode()
			return err.Error(packageName, modQuery, "Exec-Delete: ", e.Error())
		}
		if !toolkit.HasMember(q.indexes, i) && len(dataTemp) > 0 {
			writer.Write(dataTemp)
			writer.Flush()
		}
	}

	q.execOpr = true
	e = q.endWriteMode()
	if e != nil {
		return err.Error(packageName, modQuery, "Exec-Delete: ", e.Error())
	}
	return nil
}
// SaveConnection upserts a connection definition from the request payload.
// For csv/json drivers with an HTTP host, the remote file is downloaded into
// EC_DATA_PATH; otherwise the host path itself is used as the file location.
func (d *DataSourceController) SaveConnection(r *knot.WebContext) interface{} {
	r.Config.OutputType = knot.OutputJson

	payload := map[string]interface{}{}
	err := r.GetPayload(&payload)
	if err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}

	o := new(colonycore.Connection)
	o.ID = payload["_id"].(string)
	o.Driver = payload["Driver"].(string)
	o.Host = payload["Host"].(string)
	o.Database = payload["Database"].(string)
	o.UserName = payload["UserName"].(string)
	o.Password = payload["Password"].(string)
	o.Settings = d.parseSettings(payload["Settings"], map[string]interface{}{}).(map[string]interface{})

	// remove any existing record with the same ID before saving the new one
	err = colonycore.Delete(o)
	if err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}

	if toolkit.HasMember([]string{"csv", "json"}, o.Driver) {
		if strings.HasPrefix(o.Host, "http") {
			fileType := helper.GetFileExtension(o.Host)
			o.FileLocation = fmt.Sprintf("%s.%s", filepath.Join(EC_DATA_PATH, "datasource", "upload", o.ID), fileType)

			file, err := os.Create(o.FileLocation)
			if err != nil {
				return helper.CreateResult(false, nil, err.Error())
			}
			defer file.Close()

			resp, err := http.Get(o.Host)
			if err != nil {
				return helper.CreateResult(false, nil, err.Error())
			}
			defer resp.Body.Close()

			_, err = io.Copy(file, resp.Body)
			if err != nil {
				return helper.CreateResult(false, nil, err.Error())
			}
		} else {
			o.FileLocation = o.Host
		}
	}

	err = colonycore.Save(o)
	if err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}
	return helper.CreateResult(true, nil, "")
}
func extractFields(data toolkit.M) []string {
	var attributes []string
	if data != nil {
		for field, val := range data {
			var datatypelist = []string{"map", "invalid", "struct", "slice"}
			var value reflect.Value
			if val != nil {
				value = reflect.Zero(reflect.TypeOf(val))
			}
			if toolkit.HasMember(datatypelist, value.Kind().String()) {
				continue
			}
			attributes = append(attributes, field)
		}
	}
	return attributes
}
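// extractFields pairs with extractDataBulk above: take the column order from
// the first record, then render every record against that same order. Sketch
// only; the table name is invented and all records are assumed to share the
// same scalar fields.
func exampleBuildBulkInsert(records []toolkit.M) string {
	if len(records) == 0 {
		return ""
	}
	fields := extractFields(records[0])
	stmt := "INSERT INTO products (" + strings.Join(fields, ", ") + ") VALUES "
	for i, rec := range records {
		if i > 0 {
			stmt += ", "
		}
		stmt += extractDataBulk(fields, rec, "mysql")
	}
	return stmt
}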
// openFile lazily loads the JSON file backing this query into q.data. A
// missing file is tolerated for save/insert when the connection setting
// "newfile" is true (the file is created), and treated as an empty data set
// when the path simply does not exist yet.
func (q *Query) openFile(commandtype string) error {
	if q.fileHasBeenOpened {
		return nil
	}

	_, e := os.Stat(q.jsonPath)
	setting := q.Connection().Info().Settings
	if e != nil && toolkit.HasMember([]interface{}{dbox.QueryPartSave, dbox.QueryPartInsert}, commandtype) &&
		strings.Contains(e.Error(), "cannot find the file specified") && setting != nil {
		newfile := setting.Get("newfile", false).(bool)
		if newfile {
			e = q.writeFile()
			if e != nil {
				return err.Error(packageName, modQuery, "openFile: "+commandtype+" Write fail", e.Error())
			}
		} else {
			return err.Error(packageName, modQuery, "openFile: "+commandtype+" Create new file is false", e.Error())
		}
	} else if e != nil && (strings.Contains(e.Error(), "does not exist") || strings.Contains(e.Error(), "no such file or directory")) {
		q.data = []toolkit.M{}
		return nil
	} else if e != nil {
		return err.Error(packageName, modQuery, "openFile: Open file fail", e.Error())
	}

	bs, e := ioutil.ReadFile(q.jsonPath)
	if e != nil {
		return err.Error(packageName, modQuery, "openFile: Read file data fail", e.Error())
	}

	jsonText := string(bs)
	var tempData []toolkit.M
	e = toolkit.UnjsonFromString(jsonText, &tempData)
	if e != nil {
		return err.Error(packageName, modQuery, "openFile: Serialization fail", e.Error())
	}
	q.data = tempData
	q.fileHasBeenOpened = true
	return nil
}
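// A sketch of the connection setting that openFile reads: with "newfile" set,
// a save/insert may create the JSON file instead of failing when it does not
// exist yet. Assumes dbox.ConnectionInfo exposes Host and Settings fields as
// used with dbox.NewConnection elsewhere in this code; the path is a placeholder.
func exampleNewFileSetting() error {
	ci := &dbox.ConnectionInfo{Host: "/tmp/data.json", Settings: toolkit.M{"newfile": true}}
	conn, err := dbox.NewConnection("json", ci)
	if err != nil {
		return err
	}
	if err = conn.Connect(); err != nil {
		return err
	}
	defer conn.Close()
	return nil
}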
// GetDataSourceCollections lists the collections/tables available on the
// connection identified by the payload. Flat-file drivers expose the driver
// name itself as the single "collection".
func (d *DataSourceController) GetDataSourceCollections(r *knot.WebContext) interface{} {
	r.Config.OutputType = knot.OutputJson

	payload := map[string]interface{}{}
	err := r.GetPayload(&payload)
	if err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}

	connectionID := payload["connectionID"].(string)
	if connectionID == "" {
		return helper.CreateResult(true, []string{}, "")
	}

	dataConn := new(colonycore.Connection)
	err = colonycore.Get(dataConn, connectionID)
	if err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}
	if err := d.checkIfDriverIsSupported(dataConn.Driver); err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}
	if toolkit.HasMember([]string{"csv", "json"}, dataConn.Driver) {
		return helper.CreateResult(true, []string{dataConn.Driver}, "")
	}

	var conn dbox.IConnection
	conn, err = helper.ConnectUsingDataConn(dataConn).Connect()
	if err != nil {
		return helper.CreateResult(false, nil, err.Error())
	}
	defer conn.Close()

	collections := conn.ObjectNames(dbox.ObjTypeTable)
	return helper.CreateResult(true, collections, "")
}
// getLogger creates a log engine for one data-grabber run and records the run
// timestamp on the grabber's RunAt list (delete followed by save acts as an update).
func (d *DataGrabberController) getLogger(dataGrabber *colonycore.DataGrabber) (*toolkit.LogEngine, error) {
	logAt := time.Now().Format("20060102-150405")
	logFileName := strings.Split(logAt, "-")[0]
	logFileNameParsed := fmt.Sprintf("%s-%s", dataGrabber.ID, logFileName)
	logFilePattern := ""

	logConf, err := toolkit.NewLog(false, true, dgLogPath, logFileNameParsed, logFilePattern)
	if err != nil {
		return nil, err
	}

	currentDataGrabber := new(colonycore.DataGrabber)
	err = colonycore.Get(currentDataGrabber, dataGrabber.ID)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return logConf, err
	}

	if currentDataGrabber.RunAt == nil {
		currentDataGrabber.RunAt = []string{}
	}
	if !toolkit.HasMember(currentDataGrabber.RunAt, logAt) {
		currentDataGrabber.RunAt = append(currentDataGrabber.RunAt, logAt)
	}

	err = colonycore.Delete(currentDataGrabber)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return logConf, err
	}
	err = colonycore.Save(currentDataGrabber)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return logConf, err
	}

	return logConf, nil
}
// toInterface converts a raw string into a typed value. When dataType is not
// supplied it is inferred: a "#...#" wrapper marks a date, otherwise the data
// is probed as int, then float, then falls back to string.
func toInterface(data string, dataType string, dateFormat string) interface{} {
	if dataType == "" {
		if strings.HasPrefix(data, "#") && strings.HasSuffix(data, "#") {
			dataType = DataDate
		} else {
			vfloat := toolkit.ToFloat64(data, 2, toolkit.RoundingAuto)
			vint := toolkit.ToInt(data, toolkit.RoundingAuto)
			if int(vfloat) == vint && vint != 0 {
				dataType = DataInt
			} else if vfloat != 0 {
				// numeric but not an integer
				dataType = DataFloat
			} else {
				dataType = DataString
			}
		}
	}

	if dataType == DataDate {
		return toolkit.String2Date(data, dateFormat)
	} else if dataType == DataInt {
		return toolkit.ToInt(data, toolkit.RoundingAuto)
	} else if toolkit.HasMember(DataFloats, dataType) {
		return toolkit.ToFloat64(data, 2, toolkit.RoundingAuto)
	}
	return data
}
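// Illustrative calls into toInterface. The exact Go type each literal resolves
// to depends on the toolkit conversions above, so the comments are indicative
// only, and the date format string is a placeholder.
func exampleToInterface() {
	v1 := toInterface("42", "", "")                     // numeric string, probed path
	v2 := toInterface("#2016-01-02#", "", "yyyy-MM-dd") // #...# marker takes the date path
	v3 := toInterface("hello", "", "")                  // falls through as a plain string
	toolkit.Println(v1, v2, v3)
}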
// GenerateNewField makes sure every mapped destination field exists on the
// destination data source: it fetches one sample record, adds any mapped
// field missing from the destination metadata, and saves the record back.
func (d *DataGrabberController) GenerateNewField(payload *colonycore.DataGrabber) (bool, error) {
	dsDestination := new(colonycore.DataSource)
	err := colonycore.Get(dsDestination, payload.DataSourceDestination)
	if err != nil {
		return false, err
	}

	dataConnDest := new(colonycore.Connection)
	err = colonycore.Get(dataConnDest, dsDestination.ConnectionID)
	if err != nil {
		return false, err
	}

	tableName := dsDestination.QueryInfo.GetString("from")
	dataDSnm, _, conn, query, _, err := CreateDataSourceController(d.Server).ConnectToDataSource(payload.DataSourceDestination)
	if err != nil {
		return false, err
	}
	defer conn.Close()

	cursor, err := query.Cursor(nil)
	if err != nil {
		return false, err
	}
	defer cursor.Close()

	dataNewDest := toolkit.M{}
	if !toolkit.HasMember([]string{"json", "mysql"}, dataConnDest.Driver) {
		err = cursor.Fetch(&dataNewDest, 1, false)
	} else {
		dataAll := []toolkit.M{}
		err = cursor.Fetch(&dataAll, 1, false)
		if err != nil {
			return false, err
		}
		if len(dataAll) > 0 {
			dataNewDest = dataAll[0]
		}
	}

	var fieldID = []string{}
	for _, each := range dataDSnm.MetaData {
		fieldID = append(fieldID, each.ID)
	}

	for _, each := range payload.Maps {
		// check whether the mapped field already exists in the dsDestination
		// metadata; if not, create it on the sample record
		if !toolkit.HasMember(fieldID, each.Destination) {
			dataNewDest.Set(each.Destination, nil)
		}
	}

	err = conn.NewQuery().Save().From(tableName).Exec(toolkit.M{"data": dataNewDest})
	if err != nil {
		return false, err
	}
	return true, nil
}
func (q *Query) Exec(parm toolkit.M) error { var ( e error updatedValue, dataMs []toolkit.M dataM toolkit.M ) filters, e := q.Filters(parm) if e != nil { return errorlib.Error(packageName, modQuery, "Exec", e.Error()) } if parm == nil { parm = toolkit.M{} } data := parm.Get("data", nil) filePath := q.Connection().(*Connection).filePath commandType := filters.Get("cmdType").(string) hasWhere := filters.Has("where") hasCmdType := toolkit.M{} hasData := parm.Has("data") getWhere := filters.Get("where", []*dbox.Filter{}).([]*dbox.Filter) dataIsSlice := toolkit.IsSlice(data) if dataIsSlice { e = toolkit.Unjson(toolkit.Jsonify(data), &dataMs) if e != nil { return errorlib.Error(packageName, modQuery, "Exec: "+commandType, "Data encoding error: "+e.Error()) } for _, v := range dataMs { id := toolkit.Id(v) idF := toolkit.IdField(v) if toolkit.IsNilOrEmpty(id) { return errorlib.Error(packageName, modCursor+".Exec", commandType, "Unable to find ID in slice data") } else { getWhere = []*dbox.Filter{dbox.Eq(idF, id)} } } } else { dataM, e = toolkit.ToM(data) if e != nil { return errorlib.Error(packageName, modQuery, "Exec: "+commandType, "Unable to Map, error: "+e.Error()) } id := toolkit.Id(dataM) if !toolkit.IsNilOrEmpty(id) { getWhere = []*dbox.Filter{dbox.Eq(toolkit.IdField(dataM), id)} } } var dataMaps []toolkit.M q.ReadFile(&dataMaps, filePath) if commandType == dbox.QueryPartInsert { hasCmdType.Set("hasInsert", true) if !hasData { return errorlib.Error(packageName, modCursor+".Exec", commandType, "Sorry data not found!, unable to insert data") } result := dbox.Find(dataMaps, getWhere) if len(result) > 0 { return errorlib.Error(packageName, modCursor+".Exec", commandType, "ID already exist, unable insert data ") } if dataIsSlice { var sliceData []toolkit.M for _, v := range dataMs { sliceData = finUpdateObj(dataMaps, v, "insert") } updatedValue = sliceData } else { updatedValue = finUpdateObj(dataMaps, dataM, "insert") } } else if commandType == dbox.QueryPartUpdate { hasCmdType.Set("hasUpdate", true) if !hasData { return errorlib.Error(packageName, modCursor+".Exec", commandType, "Sorry data not found!, unable to update data") } if hasWhere { var indexes []interface{} whereIndex := dbox.Find(dataMaps, getWhere) indexes = toolkit.ToInterfaceArray(&whereIndex) // toolkit.Printf("whereIndex>%v indexes%v\n", whereIndex, indexes) var dataUpdate toolkit.M var updateDataIndex int isDataSlice := toolkit.IsSlice(data) if isDataSlice == false { isDataSlice = false data, e = toolkit.ToM(data) if e != nil { return errorlib.Error(packageName, modQuery, "Exec: "+commandType, "Serde data fail"+e.Error()) } e = toolkit.Serde(data, &dataUpdate, "") if e != nil { return errorlib.Error(packageName, modQuery, "Exec: "+commandType, "Serde data fail"+e.Error()) } } for i, v := range dataMaps { if toolkit.HasMember(indexes, i) || !hasWhere { if isDataSlice { e = toolkit.Serde(toolkit.SliceItem(data, updateDataIndex), &dataUpdate, "") if e != nil { return errorlib.Error(packageName, modQuery, "Exec: "+commandType, "Serde data fail"+e.Error()) } updateDataIndex++ } dataOrigin := dataMaps[i] toolkit.CopyM(&dataUpdate, &dataOrigin, false, []string{"_id"}) toolkit.Serde(dataOrigin, &v, "") dataMaps[i] = v } } updatedValue = dataMaps } else { updatedValue = finUpdateObj(dataMaps, dataM, "update") } } else if commandType == dbox.QueryPartDelete { hasCmdType.Set("hasDelete", true) // if multi { if hasWhere { result := dbox.Find(dataMaps, getWhere) if len(result) > 0 { for i, v := range dataMaps { if toolkit.HasMember(result, 
i) == false { updatedValue = append(updatedValue, v) } } } } else { updatedValue = []toolkit.M{} } } else if commandType == dbox.QueryPartSave { hasCmdType.Set("hasSave", true) if !hasData { return errorlib.Error(packageName, modCursor+".Exec", commandType, "Sorry data not found!, unable to update data") } q.dataType = "save" q.whereData = append(q.whereData, getWhere...) q.sliceData = append(q.sliceData, dataM) } if hasCmdType.Has("hasInsert") || hasCmdType.Has("hasUpdate") || hasCmdType.Has("hasDelete") { e = q.WriteFile(updatedValue) if e != nil { return errorlib.Error(packageName, modQuery+".Exec", commandType, e.Error()) } } return nil }
func (d *DataSourceController) ConnectToDataSourceDB(payload toolkit.M) (int, []toolkit.M, *colonycore.DataBrowser, error) { var hasLookup bool toolkit.Println("payload : ", payload) if payload.Has("haslookup") { hasLookup = payload.Get("haslookup").(bool) } _id := toolkit.ToString(payload.Get("browserid", "")) sort := payload.Get("sort") search := payload.Get("search") _ = search take := toolkit.ToInt(payload.Get("take", ""), toolkit.RoundingAuto) skip := toolkit.ToInt(payload.Get("skip", ""), toolkit.RoundingAuto) TblName := toolkit.M{} payload.Unset("browserid") //sorter = "" if sort != nil { tmsort, _ := toolkit.ToM(sort.([]interface{})[0]) fmt.Printf("====== sort %#v\n", tmsort["dir"]) if tmsort["dir"] == "asc" { sorter = tmsort["field"].(string) } else if tmsort["dir"] == "desc" { sorter = "-" + tmsort["field"].(string) } else if tmsort["dir"] == nil { sorter = " " } } else { sorter = " " } dataDS := new(colonycore.DataBrowser) err := colonycore.Get(dataDS, _id) if err != nil { return 0, nil, nil, err } dataConn := new(colonycore.Connection) err = colonycore.Get(dataConn, dataDS.ConnectionID) if err != nil { return 0, nil, nil, err } if err := d.checkIfDriverIsSupported(dataConn.Driver); err != nil { return 0, nil, nil, err } connection, err := helper.ConnectUsingDataConn(dataConn).Connect() if err != nil { return 0, nil, nil, err } if dataDS.QueryType == "" { TblName.Set("from", dataDS.TableNames) payload.Set("from", dataDS.TableNames) } else if dataDS.QueryType == "Dbox" { getTableName := toolkit.M{} toolkit.UnjsonFromString(dataDS.QueryText, &getTableName) payload.Set("from", getTableName.Get("from").(string)) if qSelect := getTableName.Get("select", "").(string); qSelect != "" { payload.Set("select", getTableName.Get("select").(string)) } } else if dataDS.QueryType == "SQL" { var QueryString string if dataConn.Driver == "mysql" || dataConn.Driver == "hive" { QueryString = " LIMIT " + toolkit.ToString(take) + " OFFSET " + toolkit.ToString(skip) } else if dataConn.Driver == "mssql" { QueryString = " OFFSET " + toolkit.ToString(skip) + " ROWS FETCH NEXT " + toolkit.ToString(take) + " ROWS ONLY " } else if dataConn.Driver == "postgres" { QueryString = " LIMIT " + toolkit.ToString(take) + " OFFSET " + toolkit.ToString(skip) } stringQuery := toolkit.Sprintf("%s %s", dataDS.QueryText, QueryString) payload.Set("freetext", stringQuery) // toolkit.Println(stringQuery) } qcount, _ := d.parseQuery(connection.NewQuery(), TblName) query, _ := d.parseQuery(connection.NewQuery() /*.Skip(skip).Take(take) .Order(sorter)*/, payload) var selectfield string for _, metadata := range dataDS.MetaData { tField := metadata.Field if payload.Has(tField) { selectfield = toolkit.ToString(tField) if toolkit.IsSlice(payload[tField]) { query = query.Where(dbox.In(tField, payload[tField].([]interface{})...)) qcount = qcount.Where(dbox.In(tField, payload[tField].([]interface{})...)) } else if !toolkit.IsNilOrEmpty(payload[tField]) { var hasPattern bool for _, val := range querypattern { if strings.Contains(toolkit.ToString(payload[tField]), val) { hasPattern = true } } if hasPattern { query = query.Where(dbox.ParseFilter(toolkit.ToString(tField), toolkit.ToString(payload[tField]), toolkit.ToString(metadata.DataType), "")) qcount = qcount.Where(dbox.ParseFilter(toolkit.ToString(tField), toolkit.ToString(payload[tField]), toolkit.ToString(metadata.DataType), "")) } else { switch toolkit.ToString(metadata.DataType) { case "int": query = query.Where(dbox.Eq(tField, toolkit.ToInt(payload[tField], 
toolkit.RoundingAuto))) qcount = qcount.Where(dbox.Eq(tField, toolkit.ToInt(payload[tField], toolkit.RoundingAuto))) case "float32": query = query.Where(dbox.Eq(tField, toolkit.ToFloat32(payload[tField], 2, toolkit.RoundingAuto))) qcount = qcount.Where(dbox.Eq(tField, toolkit.ToFloat32(payload[tField], 2, toolkit.RoundingAuto))) case "float64": query = query.Where(dbox.Eq(tField, toolkit.ToFloat64(payload[tField], 2, toolkit.RoundingAuto))) qcount = qcount.Where(dbox.Eq(tField, toolkit.ToFloat64(payload[tField], 2, toolkit.RoundingAuto))) default: query = query.Where(dbox.Contains(tField, toolkit.ToString(payload[tField]))) qcount = qcount.Where(dbox.Contains(tField, toolkit.ToString(payload[tField]))) } } } } } if hasLookup && selectfield != "" { if toolkit.HasMember(ds_flatfile, dataConn.Driver) { query = query.Select(selectfield) qcount = qcount.Select(selectfield) } else { query = query.Select(selectfield).Group(selectfield) qcount = qcount.Select(selectfield).Group(selectfield) } } ccount, err := qcount.Cursor(nil) if err != nil { return 0, nil, nil, err } defer ccount.Close() dcount := ccount.Count() cursor, err := query.Cursor(nil) if err != nil { return 0, nil, nil, err } defer cursor.Close() data := []toolkit.M{} cursor.Fetch(&data, 0, false) if err != nil { return 0, nil, nil, err } if hasLookup && selectfield != "" && !toolkit.HasMember(ds_rdbms, dataConn.Driver) && !toolkit.HasMember(ds_flatfile, dataConn.Driver) { dataMongo := []toolkit.M{} for _, val := range data { mVal, _ := toolkit.ToM(val.Get("_id")) dataMongo = append(dataMongo, mVal) } data = dataMongo } else if hasLookup && selectfield != "" && toolkit.HasMember(ds_flatfile, dataConn.Driver) { /*distinct value for flat file*/ dataFlat := []toolkit.M{} var existingVal = []string{""} for _, val := range data { valString := toolkit.ToString(val.Get(selectfield)) if !toolkit.HasMember(existingVal, valString) { dataFlat = append(dataFlat, val) existingVal = append(existingVal, valString) } } data = dataFlat } return dcount, data, dataDS, nil }
// GetQuery builds a dbox query for the given data-setting ID: it connects to
// the configured destination, applies the collection, the selected source
// columns, any filter condition, and the take/skip limits for that setting.
func (g *GetDatabase) GetQuery(dataSettingId string) (iQ dbox.IQuery, err error) {
	g.conn, err = dbox.NewConnection(g.desttype, &g.ConnectionInfo)
	if err != nil {
		return
	}
	err = g.conn.Connect()
	if err != nil {
		return
	}

	iQ = g.conn.NewQuery()
	if g.CollectionSettings[dataSettingId].Collection != "" {
		iQ.From(g.CollectionSettings[dataSettingId].Collection)
	}

	aSelect := make([]string, 0)
	for _, val := range g.CollectionSettings[dataSettingId].MapsColumns {
		tstring := val.Source
		if strings.Contains(val.Source, "|") {
			splitstring := strings.Split(val.Source, "|")
			tstring = splitstring[0]
		}
		// only add each source column once
		if tstring != "" && !toolkit.HasMember(aSelect, tstring) {
			aSelect = append(aSelect, tstring)
		}
	}
	if len(aSelect) > 0 {
		iQ.Select(aSelect...)
	}

	if len(g.CollectionSettings[dataSettingId].FilterCond) > 0 {
		iQ.Where(g.CollectionSettings[dataSettingId].filterDbox)
	}
	if g.CollectionSettings[dataSettingId].Take > 0 {
		iQ.Take(g.CollectionSettings[dataSettingId].Take)
	}
	iQ.Skip(g.CollectionSettings[dataSettingId].Skip)

	return
}
func (d *DataGrabberController) AutoGenerateDataSources(payload *colonycore.DataGrabberWizardPayload, formatTime string) ([]*colonycore.DataGrabber, error) { var connSource dbox.IConnection var connDest dbox.IConnection var collObjectNames []string var dirpath string isNosql := false result := []*colonycore.DataGrabber{} dataConnSource := new(colonycore.Connection) //pengambilan data untuk mengecek driver destination (mongo, json, csv, sql dan lain-lain) dataConnDest := new(colonycore.Connection) err := colonycore.Get(dataConnDest, payload.ConnectionDestination) if err != nil { return result, err } if !toolkit.HasMember([]string{"mysql", "hive"}, dataConnDest.Driver) { //mengambil nilai object atau tabel yang ada didestination connDest, err = helper.ConnectUsingDataConn(dataConnDest).Connect() if err != nil { return result, err } defer connDest.Close() if toolkit.HasMember([]string{"json", "csv"}, dataConnDest.Driver) { var filedata string dirpath, filedata = filepath.Split(dataConnDest.FileLocation) collObjectNames = []string{strings.Split(filedata, ".")[0]} } else { collObjectNames = connDest.ObjectNames(dbox.ObjTypeAll) } //connection source/from err := colonycore.Get(dataConnSource, payload.ConnectionSource) if err != nil { return result, err } connSource, err = helper.ConnectUsingDataConn(dataConnSource).Connect() if err != nil { return result, err } defer connSource.Close() isNosql = true } for key, each := range payload.Transformations { if tDest := strings.TrimSpace(each.TableDestination); tDest != "" { var connectionIDDest string if isNosql { //pengecekan tidak adanya tabel di connection destination if each.TableDestination != "" && !toolkit.HasMember(collObjectNames, each.TableDestination) { //pengambilan ds metadata sesuai dengan table source var queryS = connSource.NewQuery().Take(1) if !toolkit.HasMember([]string{"csv", "json"}, dataConnSource.Driver) { queryS = queryS.From(each.TableSource) } csr, err := queryS.Cursor(nil) if err != nil { return result, err } defer csr.Close() data := toolkit.M{} if !toolkit.HasMember([]string{"json", "mysql"}, dataConnSource.Driver) { err = csr.Fetch(&data, 1, false) } else { dataAll := []toolkit.M{} err = csr.Fetch(&dataAll, 1, false) if err != nil { return result, err } if len(dataAll) > 0 { data = dataAll[0] } } if toolkit.HasMember([]string{"csv", "json"}, dataConnDest.Driver) { // filepath.WalkFunc o := new(colonycore.Connection) exts := filepath.Ext(dataConnDest.FileLocation) extstrim := strings.TrimPrefix(exts, ".") newpath := filepath.Join(dirpath, each.TableDestination+exts) connectionIDDest = fmt.Sprintf("conn_%s_%s", extstrim, formatTime) o.ID = connectionIDDest o.Driver = extstrim o.Host = newpath if dataConnDest.Driver == "csv" { o.Settings = toolkit.M{"newfile": true, "useheader": true, "delimiter": ","} } else { o.Settings = toolkit.M{"newfile": true} } if strings.HasPrefix(o.Host, "http") { fileType := helper.GetFileExtension(o.Host) o.FileLocation = fmt.Sprintf("%s.%s", filepath.Join(EC_DATA_PATH, "datasource", "upload", o.ID), fileType) file, err := os.Create(o.FileLocation) if err != nil { return nil, err } defer file.Close() resp, err := http.Get(o.Host) if err != nil { return nil, err } defer resp.Body.Close() _, err = io.Copy(file, resp.Body) if err != nil { return nil, err } } else { o.FileLocation = o.Host } err := colonycore.Save(o) if err != nil { return result, err } newconnDest, err := helper.ConnectUsingDataConn(o).Connect() if err != nil { return result, err } defer newconnDest.Close() err = 
newconnDest.NewQuery().Save().Exec(toolkit.M{"data": data}) if err != nil { return result, err } } else { err = connDest.NewQuery().Save().From(each.TableDestination).Exec(toolkit.M{"data": data}) if err != nil { return result, err } } } } var prevDS string var nextDS string mapGrabber := []*colonycore.Map{} for i := 0; i < 2; i++ { var valueFrom string var connectionID string prefix := "" if t := strings.TrimSpace(payload.Prefix); t != "" { prefix = fmt.Sprintf("%s_", t) } cdsID := fmt.Sprintf("%sDS_%d_%d_%s", prefix, i, key, formatTime) if i == 0 { //table source valueFrom = each.TableSource connectionID = payload.ConnectionSource } else { //table destination valueFrom = each.TableDestination if !toolkit.HasMember([]string{"csv", "json"}, dataConnDest.Driver) { connectionID = payload.ConnectionDestination } else { connectionID = connectionIDDest } } squery := fmt.Sprintf(`{"from":"%s", "select":"*"}`, valueFrom) queryinf := toolkit.M{} json.Unmarshal([]byte(squery), &queryinf) dataDs := []colonycore.DataSource{} cursor, err := colonycore.Find(new(colonycore.DataSource), dbox.Eq("ConnectionID", connectionID)) cursor.Fetch(&dataDs, 0, false) if err != nil { return result, err } defer cursor.Close() dataConn := new(colonycore.Connection) err = colonycore.Get(dataConn, connectionID) if err != nil { return result, err } resultDataRaw := []toolkit.M{} if cursor.Count() > 0 { for _, eachData := range dataDs { resultEachData := toolkit.M{} qFrom := eachData.QueryInfo.Get("from").(string) isSelectExists := eachData.QueryInfo.Has("select") if qFrom == valueFrom && isSelectExists { resultEachData.Set(valueFrom, eachData.ID) resultDataRaw = append(resultDataRaw, resultEachData) if i == 1 { for _, eachMetadata := range eachData.MetaData { mapGrabberField := colonycore.Map{Source: eachMetadata.ID, SourceType: eachMetadata.Type, Destination: eachMetadata.ID, DestinationType: eachMetadata.Type} mapGrabber = append(mapGrabber, &mapGrabberField) } } } } if len(resultDataRaw) == 0 { resultEachData := toolkit.M{} resultEachData.Set(valueFrom, cdsID) resultDataRaw = append(resultDataRaw, resultEachData) cds := new(colonycore.DataSource) cds.ID = cdsID cds.ConnectionID = connectionID cds.MetaData = []*colonycore.FieldInfo{} cds.QueryInfo = queryinf _, metadata, err := CreateDataSourceController(d.Server).DoFetchDataSourceMetaData(dataConn, valueFrom) if err != nil { return result, err } cds.MetaData = metadata err = colonycore.Save(cds) if err != nil { return result, err } if i == 1 { for _, eachMetadata := range metadata { mapGrabberField := colonycore.Map{Source: eachMetadata.ID, SourceType: eachMetadata.Type, Destination: eachMetadata.ID, DestinationType: eachMetadata.Type} mapGrabber = append(mapGrabber, &mapGrabberField) } } } } else { resultEachData := toolkit.M{} resultEachData.Set(valueFrom, cdsID) resultDataRaw = append(resultDataRaw, resultEachData) cds := new(colonycore.DataSource) cds.ID = cdsID cds.ConnectionID = connectionID cds.MetaData = []*colonycore.FieldInfo{} cds.QueryInfo = queryinf _, metadata, err := CreateDataSourceController(d.Server).DoFetchDataSourceMetaData(dataConn, valueFrom) if err != nil { return result, err } cds.MetaData = metadata err = colonycore.Save(cds) if err != nil { return result, err } if i == 1 { for _, eachMetadata := range metadata { mapGrabberField := colonycore.Map{Source: eachMetadata.ID, SourceType: eachMetadata.Type, Destination: eachMetadata.ID, DestinationType: eachMetadata.Type} mapGrabber = append(mapGrabber, &mapGrabberField) } } } for _, resd := range 
resultDataRaw { if i == 0 { //table source prevDS = resd.Get(valueFrom).(string) break } else { //table destination nextDS = resd.Get(valueFrom).(string) break } } } prefix := "" if t := strings.TrimSpace(payload.Prefix); t != "" { prefix = fmt.Sprintf("%s_", t) } owiz := new(colonycore.DataGrabber) owiz.ID = fmt.Sprintf("%sDG_%s_%s", prefix, strconv.Itoa(key), formatTime) owiz.DataSourceOrigin = prevDS owiz.DataSourceDestination = nextDS owiz.IsFromWizard = true owiz.GrabInterval = 20 owiz.IntervalType = "seconds" owiz.InsertMode = "append" // owiz.Maps = []*colonycore.Map{} owiz.Maps = mapGrabber owiz.PostTransferCommand = "" owiz.PreTransferCommand = "" owiz.TimeoutInterval = 20 owiz.UseInterval = false owiz.RunAt = []string{} err := colonycore.Save(owiz) if err != nil { return result, err } result = append(result, owiz) } } return result, nil }
func (d *DataGrabberController) Transform(dataGrabber *colonycore.DataGrabber) (bool, []toolkit.M, string) { logConf, err := d.getLogger(dataGrabber) if err != nil { logConf.AddLog(err.Error(), "ERROR") defer logConf.Close() } message := fmt.Sprintf("===> Transformation started! %s -> %s interval %d %s", dataGrabber.DataSourceOrigin, dataGrabber.DataSourceDestination, dataGrabber.GrabInterval, dataGrabber.IntervalType) logConf.AddLog(message, "SUCCESS") fmt.Println(message) dsOrigin := new(colonycore.DataSource) err = colonycore.Get(dsOrigin, dataGrabber.DataSourceOrigin) if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } dsDestination := new(colonycore.DataSource) err = colonycore.Get(dsDestination, dataGrabber.DataSourceDestination) if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } dataDS, _, conn, query, metaSave, err := new(DataSourceController). ConnectToDataSource(dataGrabber.DataSourceOrigin) if len(dataDS.QueryInfo) == 0 { message := "Data source origin has invalid query" logConf.AddLog(message, "ERROR") return false, nil, message } if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } defer conn.Close() if metaSave.keyword != "" { message := `Data source origin query is not "Select"` logConf.AddLog(message, "ERROR") return false, nil, message } cursor, err := query.Cursor(nil) if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } defer cursor.Close() data := []toolkit.M{} err = cursor.Fetch(&data, 0, false) if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } connDesc := new(colonycore.Connection) err = colonycore.Get(connDesc, dsDestination.ConnectionID) if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } const FLAG_ARG_DATA string = `%1` transformedData := []toolkit.M{} for _, each := range data { eachTransformedData := toolkit.M{} for _, eachMap := range dataGrabber.Maps { var valueEachSourceField interface{} // ============================================ SOURCE if !strings.Contains(eachMap.Source, "|") { // source could be: field, object, array valueEachSourceField = each.Get(eachMap.Source) } else { // source could be: field of object, field of array-objects prev := strings.Split(eachMap.Source, "|")[0] next := strings.Split(eachMap.Source, "|")[1] var fieldInfoDes *colonycore.FieldInfo = nil for _, eds := range dsOrigin.MetaData { if eds.ID == prev { fieldInfoDes = eds break } } if fieldInfoDes != nil { // source is field of array-objects if fieldInfoDes.Type == "array-objects" { valueObjects := []interface{}{} if temp, _ := each.Get(prev, nil).([]interface{}); temp != nil { valueObjects = make([]interface{}, len(temp)) for i, each := range temp { if tempSub, _ := toolkit.ToM(each); tempSub != nil { valueObjects[i] = tempSub.Get(next) } } } valueEachSourceField = valueObjects } else { // source is field of object valueObject := toolkit.M{} if valueObject, _ = toolkit.ToM(each.Get(prev)); valueObject != nil { valueEachSourceField = valueObject.Get(next) } } } } // ============================================ DESTINATION if !strings.Contains(eachMap.Destination, "|") { if eachMap.SourceType == "object" { sourceObject, _ := toolkit.ToM(valueEachSourceField) if sourceObject == nil { sourceObject = toolkit.M{} } valueObject := toolkit.M{} for _, desMeta := range dsDestination.MetaData { if desMeta.ID == eachMap.Destination { for _, eachMetaSub := range desMeta.Sub { // 
valueObject.Set(eachMetaSub.ID, sourceObject.Get(eachMetaSub.ID)) valueObject.Set(eachMetaSub.ID, d.convertTo(sourceObject.Get(eachMetaSub.ID), eachMap.DestinationType)) } break } } eachTransformedData.Set(eachMap.Destination, valueObject) } else if eachMap.SourceType == "array-objects" { sourceObjects, _ := valueEachSourceField.([]interface{}) if sourceObjects == nil { sourceObjects = []interface{}{} } valueObjects := []interface{}{} for _, sourceObjectRaw := range sourceObjects { sourceObject, _ := toolkit.ToM(sourceObjectRaw) if sourceObject == nil { sourceObject = toolkit.M{} } valueObject := toolkit.M{} for _, desMeta := range dsDestination.MetaData { if desMeta.ID == eachMap.Destination { for _, eachMetaSub := range desMeta.Sub { // valueObject.Set(eachMetaSub.ID, sourceObject.Get(eachMetaSub.ID)) valueObject.Set(eachMetaSub.ID, d.convertTo(sourceObject.Get(eachMetaSub.ID), eachMap.DestinationType)) } break } } valueObjects = append(valueObjects, valueObject) } eachTransformedData.Set(eachMap.Destination, valueObjects) } else { if strings.Contains(eachMap.DestinationType, "array") { valueObjects := each.Get(eachMap.Source) eachTransformedData.Set(eachMap.Destination, valueObjects) } else { // eachTransformedData.Set(eachMap.Destination, convertDataType(eachMap.DestinationType, eachMap.Source, each)) eachTransformedData.Set(eachMap.Destination, d.convertTo(each.Get(eachMap.Source), eachMap.DestinationType)) } } } else { prev := strings.Split(eachMap.Destination, "|")[0] next := strings.Split(eachMap.Destination, "|")[1] var fieldInfoDes *colonycore.FieldInfo = nil for _, eds := range dsDestination.MetaData { if eds.ID == prev { fieldInfoDes = eds break } } if fieldInfoDes != nil { if fieldInfoDes.Type == "array-objects" { valueObjects := []interface{}{} if temp := eachTransformedData.Get(prev, nil); temp == nil { valueObjects = []interface{}{} } else { valueObjects, _ = temp.([]interface{}) if valueObjects == nil { valueObjects = []interface{}{} } } if temp, _ := valueEachSourceField.([]interface{}); temp != nil { for i, eachVal := range temp { valueObject := toolkit.M{} if len(valueObjects) > i { if temp2, _ := toolkit.ToM(valueObjects[i]); temp2 != nil { valueObject = temp2 // valueObject.Set(next, eachVal) valueObject.Set(next, d.convertTo(eachVal, eachMap.DestinationType)) } valueObjects[i] = valueObject } else { if fieldInfoDes.Sub != nil { for _, subMeta := range fieldInfoDes.Sub { valueObject.Set(subMeta.ID, nil) } } // valueObject.Set(next, eachVal) valueObject.Set(next, d.convertTo(eachVal, eachMap.DestinationType)) valueObjects = append(valueObjects, valueObject) } } } eachTransformedData.Set(prev, valueObjects) } else { valueObject, _ := toolkit.ToM(eachTransformedData.Get(prev)) if valueObject == nil { valueObject = toolkit.M{} } //tambahan prevSource := strings.Split(eachMap.Source, "|")[0] nextSource := strings.Split(eachMap.Source, "|")[1] mval, _ := toolkit.ToM(each.Get(prevSource, nil)) //========= valueObject.Set(next, d.convertTo(mval.Get(nextSource), eachMap.DestinationType)) // valueObject.Set(next, convertDataType(eachMap.DestinationType, nextSource, mval)) eachTransformedData.Set(prev, valueObject) } } } } transformedData = append(transformedData, eachTransformedData) dataToSave := eachTransformedData // ================ pre transfer command if dataGrabber.PreTransferCommand != "" { // jsonTranformedDataBytes, err := json.Marshal(each) jsonTranformedDataBytes, err := json.Marshal(eachTransformedData) if err != nil { return false, nil, err.Error() } 
jsonTranformedData := string(jsonTranformedDataBytes) var preCommand = dataGrabber.PreTransferCommand if strings.Contains(dataGrabber.PreTransferCommand, FLAG_ARG_DATA) { preCommand = strings.TrimSpace(strings.Replace(dataGrabber.PreTransferCommand, FLAG_ARG_DATA, "", -1)) } dataToSave = toolkit.M{} output, err := toolkit.RunCommand(preCommand, jsonTranformedData) fmt.Printf("===> Pre Transfer Command Result\n COMMAND -> %s %s\n OUTPUT -> %s\n", preCommand, jsonTranformedData, output) if err == nil { postData := toolkit.M{} if err := json.Unmarshal([]byte(output), &postData); err == nil { dataToSave = postData } } } // ================ if len(dataToSave) == 0 { continue } nilFieldDest := eachTransformedData for _, metadataDest := range dsDestination.MetaData { if temp := eachTransformedData.Get(metadataDest.ID); temp == nil { if metadataDest.ID != "_id" { if metadataDest.Type == "object" { valueObject := toolkit.M{} for _, eachMetaSub := range metadataDest.Sub { valueObject.Set(eachMetaSub.ID, nil) } nilFieldDest.Set(metadataDest.ID, valueObject) } else if metadataDest.Type == "array-objects" { valueEachSourceField := each.Get(metadataDest.ID) sourceObjects, _ := valueEachSourceField.([]interface{}) if sourceObjects == nil { sourceObjects = []interface{}{} } valueObjects := []interface{}{} for _, sourceObjectRaw := range sourceObjects { sourceObject, _ := toolkit.ToM(sourceObjectRaw) if sourceObject == nil { sourceObject = toolkit.M{} } valueObject := toolkit.M{} for keyss, _ := range sourceObject { valueObject.Set(keyss, nil) } valueObjects = append(valueObjects, valueObject) } nilFieldDest.Set(metadataDest.ID, valueObjects) } else { if strings.Contains(metadataDest.Type, "array") { valueObjects := []interface{}{} nilFieldDest.Set(metadataDest.ID, valueObjects) } else { nilFieldDest.Set(metadataDest.ID, nil) } } } } } tableName := dsDestination.QueryInfo.GetString("from") queryWrapper := helper.Query(connDesc.Driver, connDesc.Host, connDesc.Database, connDesc.UserName, connDesc.Password, connDesc.Settings) if dataGrabber.InsertMode == "fresh" { queryWrapper.Delete(tableName, dbox.Or()) } if eachTransformedData.Has("_id") { err = queryWrapper.Delete(tableName, dbox.Eq("_id", eachTransformedData.Get("_id"))) } if toolkit.HasMember([]string{"json", "jsons", "csv", "csvs"}, connDesc.Driver) && strings.HasPrefix(connDesc.Host, "http") { queryWrapper = helper.Query(connDesc.Driver, connDesc.FileLocation, "", "", "", connDesc.Settings) } else { queryWrapper = helper.Query(connDesc.Driver, connDesc.Host, connDesc.Database, connDesc.UserName, connDesc.Password, connDesc.Settings) } if !nilFieldDest.Has("_id") || nilFieldDest.Get("_id") == nil || nilFieldDest.GetString("_id") == "<nil>" { nilFieldDest.Set("_id", helper.RandomIDWithPrefix("")) } err = queryWrapper.Save(tableName, nilFieldDest) if err != nil { logConf.AddLog(err.Error(), "ERROR") return false, nil, err.Error() } // ================ post transfer command if dataGrabber.PostTransferCommand != "" { eachTransformedData = dataToSave jsonTranformedDataBytes, err := json.Marshal(eachTransformedData) if err != nil { return false, nil, err.Error() } jsonTranformedData := string(jsonTranformedDataBytes) var postCommand = dataGrabber.PostTransferCommand if strings.Contains(dataGrabber.PostTransferCommand, FLAG_ARG_DATA) { postCommand = strings.TrimSpace(strings.Replace(dataGrabber.PostTransferCommand, FLAG_ARG_DATA, "", -1)) } output, err := toolkit.RunCommand(postCommand, jsonTranformedData) fmt.Printf("===> Post Transfer Command Result\n 
COMMAND -> %s %s\n OUTPUT -> %s\n", postCommand, jsonTranformedData, output) } } message = fmt.Sprintf("===> Success transforming %v data", len(transformedData)) logConf.AddLog(message, "SUCCESS") fmt.Println(message) return true, transformedData, "" }
func (q *Query) Exec(in toolkit.M) error { setting, e := q.prepare(in) commandType := setting["commandtype"].(string) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, e.Error()) } if setting.GetString("commandtype") == dbox.QueryPartSelect { return err.Error(packageName, modQuery, "Exec: "+commandType, "Exec is not working with select command, please use .Cursor instead") } q.Lock() defer q.Unlock() var dataM toolkit.M var dataMs []toolkit.M hasData := in.Has("data") dataIsSlice := false data := in.Get("data") if toolkit.IsSlice(data) { dataIsSlice = true e = toolkit.Unjson(toolkit.Jsonify(data), dataMs) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data encoding error: "+e.Error()) } } else { dataM, e = toolkit.ToM(data) dataMs = append(dataMs, dataM) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data encoding error: "+e.Error()) } } hasWhere := in.Has("where") where := in.Get("where", []*dbox.Filter{}).([]*dbox.Filter) if hasData && hasWhere == false && toolkit.HasMember([]interface{}{dbox.QueryPartInsert, dbox.QueryPartUpdate, dbox.QueryPartSave}, commandType) { hasWhere = true if toolkit.IsSlice(data) { ids := []interface{}{} idField := "" if idField == "" { return err.Error(packageName, modQuery, "Exec:"+commandType, "Data send is a slice, but its element has no ID") } dataCount := toolkit.SliceLen(data) for i := 0; i < dataCount; i++ { dataI := toolkit.SliceItem(data, i) if i == 0 { idField = toolkit.IdField(dataI) } ids = append(ids, toolkit.Id(dataI)) } where = []*dbox.Filter{dbox.In(idField, ids)} } else { id := toolkit.Id(data) if toolkit.IsNilOrEmpty(id) { where = []*dbox.Filter{dbox.Eq(toolkit.IdField(id), id)} } else { where = nil hasWhere = false } } } q.openFile() if commandType == dbox.QueryPartInsert { if !hasData { return err.Error(packageName, modQuery, "Exec:"+commandType, "Data is empty") } if dataIsSlice { q.data = append(q.data, dataMs...) } else { q.data = append(q.data, dataM) } } else if commandType == dbox.QueryPartUpdate { if !hasData { return err.Error(packageName, modQuery, "Exec:"+commandType, "Data is empty") } var indexes []interface{} if hasWhere { toolkit.Serde(dbox.Find(q.data, where), &indexes, "") } var dataUpdate toolkit.M var updateDataIndex int isDataSlice := toolkit.IsSlice(data) if isDataSlice == false { isDataSlice = false e = toolkit.Serde(data, &dataUpdate, "") if e != nil { return err.Error(packageName, modQuery, "Exec:"+commandType, "Unable to serialize data. "+e.Error()) } } var idField string for i, v := range q.data { if toolkit.HasMember(indexes, i) || len(indexes) == 0 { if idField == "" { idField = toolkit.IdField(v) if idField == "" { return err.Error(packageName, modQuery, "Exec:"+commandType, "No ID") } } var dataOrigin toolkit.M e = toolkit.Serde(v, &dataOrigin, "") if e != nil { return err.Error(packageName, modQuery, "Exec:"+commandType, "Unable to serialize data origin. "+e.Error()) } if isDataSlice { e = toolkit.Serde(toolkit.SliceItem(data, updateDataIndex), &dataUpdate, "") if e != nil { return err.Error(packageName, modQuery, "Exec:"+commandType, "Unable to serialize data. 
"+e.Error()) } updateDataIndex++ } for fieldName, fieldValue := range dataUpdate { if fieldName != idField { if dataOrigin.Has(fieldName) { dataOrigin.Set(fieldName, fieldValue) } } } toolkit.Serde(dataOrigin, &v, "") q.data[i] = v } } } else if commandType == dbox.QueryPartDelete { if hasWhere { var indexes []interface{} toolkit.Serde(dbox.Find(q.data, where), &indexes, "") if len(indexes) > 0 { newdata := []toolkit.M{} for index, v := range q.data { if toolkit.HasMember(indexes, index) == false { newdata = append(newdata, v) } } q.data = newdata } } else { q.data = []toolkit.M{} } } else if commandType == dbox.QueryPartSave { if !hasData { return err.Error(packageName, modQuery, "Exec:"+commandType, "Data is empty") } } q.writeFile() return nil }
func (q *Query) Exec(in toolkit.M) error { setting, e := q.prepare(in) commandType := setting["commandtype"].(string) //toolkit.Printf("Command type: %s\n", commandType) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, e.Error()) } if setting.GetString("commandtype") == dbox.QueryPartSelect { return err.Error(packageName, modQuery, "Exec: "+commandType, "Exec is not working with select command, please use .Cursor instead") } q.Lock() defer q.Unlock() var dataM toolkit.M var dataMs []toolkit.M hasData := in.Has("data") dataIsSlice := false data := in.Get("data") if toolkit.IsSlice(data) { dataIsSlice = true e = toolkit.Unjson(toolkit.Jsonify(data), dataMs) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data encoding error: "+e.Error()) } } else { dataM, e = toolkit.ToM(data) dataMs = append(dataMs, dataM) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data encoding error: "+e.Error()) } } hasWhere := setting.Has("where") where := setting.Get("where", []*dbox.Filter{}).([]*dbox.Filter) if hasWhere && len(where) == 0 { inWhere := in.Get("where") if inWhere == nil { hasWhere = false where = nil } else { if !toolkit.IsSlice(inWhere) { where = append(where, inWhere.(*dbox.Filter)) } else { where = inWhere.([]*dbox.Filter) } } } if hasData && hasWhere == false && toolkit.HasMember([]interface{}{dbox.QueryPartInsert, dbox.QueryPartDelete, dbox.QueryPartUpdate, dbox.QueryPartSave}, commandType) { hasWhere = true //toolkit.Println("check where") if toolkit.IsSlice(data) { ids := []interface{}{} idField := "" if idField == "" { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data send is a slice, but its element has no ID") } dataCount := toolkit.SliceLen(data) for i := 0; i < dataCount; i++ { dataI := toolkit.SliceItem(data, i) if i == 0 { idField = toolkit.IdField(dataI) } ids = append(ids, toolkit.Id(dataI)) } where = []*dbox.Filter{dbox.In(idField, ids)} } else { idfield := "_id" id := toolkit.Id(data) if !toolkit.IsNilOrEmpty(id) { where = []*dbox.Filter{dbox.Eq(idfield, id)} } else { where = nil hasWhere = false } } } /* toolkit.Printf("CommandType: %s HasData: %v HasWhere: %v Where: %s\n", commandType, hasData, hasWhere, toolkit.JsonString(where)) */ e = q.openFile(commandType) //toolkit.Printf(commandType+" Open File, found record: %d\nData:%s\n", len(q.data), toolkit.JsonString(q.data)) if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, e.Error()) } var indexes []interface{} if hasWhere && commandType != dbox.QueryPartInsert { whereIndex := dbox.Find(q.data, where) indexes = toolkit.ToInterfaceArray(&whereIndex) //toolkit.Printf("Where Index: %s Index:%s\n", toolkit.JsonString(whereIndex), toolkit.JsonString(indexes)) } if commandType == dbox.QueryPartInsert { if !hasData { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data is empty") } if !dataIsSlice { dataMs = []toolkit.M{dataM} } //-- validate for _, datam := range dataMs { idField, idValue := toolkit.IdInfo(datam) toolkit.Serde(dbox.Find(q.data, []*dbox.Filter{dbox.Eq(idField, idValue)}), &indexes, "") if len(indexes) > 0 { return err.Error(packageName, modQuery, "Exec: "+commandType, toolkit.Sprintf("Data %v already exist", idValue)) } } //-- insert the data q.data = append(q.data, dataMs...) 
} else if commandType == dbox.QueryPartUpdate { //-- valida if !hasData { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data is empty") } var dataUpdate toolkit.M var updateDataIndex int // if it is a slice then we need to update each data passed on its slice isDataSlice := toolkit.IsSlice(data) if isDataSlice == false { isDataSlice = false e = toolkit.Serde(data, &dataUpdate, "") if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, "Serde data fail"+e.Error()) } } var idField string //toolkit.Printf("Indexes: %s\n", toolkit.JsonString(indexes)) for i, v := range q.data { // update only data that match given inde if toolkit.HasMember(indexes, i) || !hasWhere { if idField == "" { idField = toolkit.IdField(v) if idField == "" { return err.Error(packageName, modQuery, "Exec: "+commandType, "No ID") } } // If dataslice is sent, iterate f if isDataSlice { e = toolkit.Serde(toolkit.SliceItem(data, updateDataIndex), &dataUpdate, "") if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType, "Serde data fail "+e.Error()) } updateDataIndex++ } dataOrigin := q.data[i] toolkit.CopyM(&dataUpdate, &dataOrigin, false, []string{"_id"}) toolkit.Serde(dataOrigin, &v, "") q.data[i] = v } } } else if commandType == dbox.QueryPartDelete { if hasWhere && len(where) > 0 { indexes := dbox.Find(q.data, where) if len(indexes) > 0 { newdata := []toolkit.M{} for index, v := range q.data { partOfIndex := toolkit.HasMember(indexes, index) if partOfIndex == false { newdata = append(newdata, v) } //toolkit.Println("i:", indexes, ", index:", index, ", p.ofIndex: ", partOfIndex, ", data: ", toolkit.JsonString(newdata)) } q.data = newdata } } else { q.data = []toolkit.M{} } //toolkit.Printf("Data now: %s\n", toolkit.JsonString(q.data)) } else if commandType == dbox.QueryPartSave { if !hasData { return err.Error(packageName, modQuery, "Exec: "+commandType, "Data is empty") } var dataMs []toolkit.M var dataM toolkit.M if !toolkit.IsSlice(data) { e = toolkit.Serde(&data, &dataM, "json") if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType+" Serde data fail", e.Error()) } dataMs = append(dataMs, dataM) } else { e = toolkit.Serde(&data, &dataMs, "json") if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType+" Serde data fail", e.Error()) } } //toolkit.Printf("Saving: %s\n", toolkit.JsonString(dataMs)) for _, v := range dataMs { idField, idValue := toolkit.IdInfo(v) indexes := dbox.Find(q.data, []*dbox.Filter{dbox.Eq(idField, idValue)}) if len(indexes) == 0 { q.data = append(q.data, v) } else { dataOrigin := q.data[indexes[0]] //toolkit.Printf("Copy data %s to %s\n", toolkit.JsonString(v), toolkit.JsonString(dataOrigin)) toolkit.CopyM(&v, &dataOrigin, false, []string{idField}) q.data[indexes[0]] = dataOrigin } } } e = q.writeFile() if e != nil { return err.Error(packageName, modQuery, "Exec: "+commandType+" Write fail", e.Error()) } return nil }