func TestRunSedotanReadRecHistory(t *testing.T) {
	t.Skip("Skip : Comment this line to do test")

	arrcmd := make([]string, 0, 0)
	result := toolkit.M{}
	arrcmd = append(arrcmd, EC_APP_PATH+`\bin\sedotanread.exe`)
	arrcmd = append(arrcmd, `-readtype=rechistory`)
	arrcmd = append(arrcmd, `-recfile=E:\EACIIT\src\github.com\eaciit\colony-app\data-root\webgrabber\historyrec\irondcecomcn.Iron01-20160316022830.csv`)

	if runtime.GOOS == "windows" {
		cmd = exec.Command(arrcmd[0], arrcmd[1:]...)

		rechistory, err := toolkit.RunCommand(arrcmd[0], arrcmd[1:]...)
		err = toolkit.UnjsonFromString(rechistory, &result)
		if err != nil {
			t.Errorf("Error, %s \n", err)
		}

		byteoutput, err := cmd.CombinedOutput()
		if err != nil {
			// Log.AddLog(fmt.Sprintf("[%v] run at %v, found error : %v", eid, sedotan.DateToString(thistime), err.Error()), "ERROR")
		}
		err = toolkit.UnjsonFromString(string(byteoutput), &result)
	} else {
		// cmd = exec.Command("sudo", "../daemon/sedotandaemon", `-config="`+tbasepath+`\config-daemon.json"`, `-logpath="`+tbasepath+`\log"`)
	}

	fmt.Println(result)
}
// Exec executes the command according to the Type that has been declared.
func (c *Command) Exec() (string, error) {
	var (
		res     string
		e       error
		httpRes *http.Response
	)

	res = "initial"
	e = fmt.Errorf("Command %v %s can't be executed. No valid implementation can be found",
		c.Type, c.CommandText)

	if c.Type == CommandType_Local {
		ps := []string{}
		if c.CommandParms != nil {
			ps = c.CommandParms
		}
		res, e = toolkit.RunCommand(c.CommandText, ps...)
	} else if c.Type == CommandType_SSH {
		ps := []string{c.CommandText}
		res, e = c.SshClient.RunCommandSsh(ps...)
	} else if c.Type == CommandType_REST {
		if c.RESTAuthType == RESTAuthType_None {
			httpRes, e = toolkit.HttpCall(c.RESTUrl, c.RESTMethod, nil, nil)
		} else if c.RESTAuthType == RESTAuthType_Basic {
			var config = map[string]interface{}{"auth": "basic", "user": c.RESTUser, "password": c.RESTPassword}
			httpRes, e = toolkit.HttpCall(c.RESTUrl, c.RESTMethod, nil, config)
			// httpRes, e = toolkit.HttpCall(c.RESTUrl, c.RESTMethod, nil, true, c.RESTUser, c.RESTPassword)
		}
		res = toolkit.HttpContentString(httpRes)
	}

	return res, e
}
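// Usage sketch (not part of the original source): builds a local Command and
// runs it through Exec. The field names (Type, CommandText, CommandParms) and
// the CommandType_Local constant are taken from the Exec implementation above;
// the concrete command and values here are illustrative assumptions only.
func ExampleCommandExec() {
	c := new(Command)
	c.Type = CommandType_Local
	c.CommandText = "go"
	c.CommandParms = []string{"env"}

	out, err := c.Exec()
	if err != nil {
		fmt.Printf("Unable to run command: %s\n", err.Error())
		return
	}
	fmt.Println(out)
}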
func (p *Ping) checkCommand() error {
	ps := []string{}
	if p.CommandParms != nil {
		ps = p.CommandParms
	}

	res, e := toolkit.RunCommand(p.Command, ps...)
	if e != nil {
		return e
	}

	if p.ResponseType == Response_Equals {
		if res != p.ResponseValue {
			return fmt.Errorf("Response is not valid. Expecting %s", p.ResponseValue)
		}
	} else if p.ResponseType == Response_Contains {
		if !strings.Contains(res, p.ResponseValue) {
			return fmt.Errorf("Phrase %s could not be found on response", p.ResponseValue)
		}
	} else if p.ResponseType == Response_RegEx {
		match, e := regexp.MatchString(p.ResponseValue, res)
		if e != nil {
			return e
		}
		if !match {
			return fmt.Errorf("Response is not valid. Does not match pattern %s", p.ResponseValue)
		}
	}

	return nil
}
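// Usage sketch (not part of the original source): configures a Ping whose
// command output must contain a phrase. The field names (Command,
// CommandParms, ResponseType, ResponseValue) and the Response_Contains
// constant are taken from checkCommand above; the concrete values are
// illustrative assumptions only.
func ExamplePingCheckCommand() {
	p := new(Ping)
	p.Command = "go"
	p.CommandParms = []string{"version"}
	p.ResponseType = Response_Contains
	p.ResponseValue = "go version"

	if err := p.checkCommand(); err != nil {
		fmt.Printf("ping check failed: %s\n", err.Error())
		return
	}
	fmt.Println("ping check passed")
}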
func main() {
	o, e := toolkit.RunCommand("go", "env")
	if e == nil {
		fmt.Printf("Command %s result \n%v\n", "go env", o)
	} else {
		fmt.Printf("Unable to run command: %s \n", e.Error())
	}
}
func TestRunSedotanReadHistory(t *testing.T) {
	t.Skip("Skip : Comment this line to do test")

	var history = toolkit.M{}
	arrcmd := make([]string, 0, 0)
	arrcmd = append(arrcmd, EC_APP_PATH+`\bin\sedotanread.exe`)
	arrcmd = append(arrcmd, `-readtype=history`)
	arrcmd = append(arrcmd, `-pathfile=`+EC_DATA_PATH+`\webgrabber\history\HIST-GRABDCE-20160316.csv`)

	if runtime.GOOS == "windows" {
		historystring, _ := toolkit.RunCommand(arrcmd[0], arrcmd[1:]...)
		err := toolkit.UnjsonFromString(historystring, &history)
		if err != nil {
			t.Errorf("Error, %s \n", err)
		}
	} else {
		// cmd = exec.Command("sudo", "../daemon/sedotandaemon", `-config="`+tbasepath+`\config-daemon.json"`, `-logpath="`+tbasepath+`\log"`)
	}

	fmt.Println(history)
}
func TestRunSedotanReadSnapshot(t *testing.T) {
	t.Skip("Skip : Comment this line to do test")

	arrcmd := make([]string, 0, 0)
	result := toolkit.M{}
	arrcmd = append(arrcmd, EC_APP_PATH+`\bin\sedotanread.exe`)
	arrcmd = append(arrcmd, `-readtype=snapshot`)
	arrcmd = append(arrcmd, `-pathfile=`+EC_DATA_PATH+`\daemon\daemonsnapshot.csv`)
	arrcmd = append(arrcmd, `-nameid=irondcecomcn`)

	if runtime.GOOS == "windows" {
		SnapShot, err := toolkit.RunCommand(arrcmd[0], arrcmd[1:]...)
		if err != nil {
			t.Errorf("Error, %s \n", err)
		}
		err = toolkit.UnjsonFromString(SnapShot, &result)
		if err != nil {
			t.Errorf("Error, %s \n", err)
		}
	} else {
		// cmd = exec.Command("sudo", "../daemon/sedotandaemon", `-config="`+tbasepath+`\config-daemon.json"`, `-logpath="`+tbasepath+`\log"`)
	}

	fmt.Println(result)
}
func (pkg *PackageModel) WriteBase(path string) error {
	filename := filepath.Join(path, "base.go")

	f, e := os.Open(filename)
	if e == nil {
		// remove any previously generated base.go before recreating it
		f.Close()
		os.Remove(filename)
	}
	f, e = os.Create(filename)
	if e != nil {
		return toolkit.Errorf("Failed to write %s: %s", "base.go", e.Error())
	}
	defer f.Close()

	b := bufio.NewWriter(f)
	b.WriteString(toolkit.Formatf(baseGo, pkg.Name))
	e = b.Flush()
	if e != nil {
		return toolkit.Errorf("Failed to write base.go: %s", e.Error())
	}

	toolkit.RunCommand("/bin/sh", "-c", "gofmt -w "+filename)
	return nil
}
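// Usage sketch (not part of the original source): generates base.go for a
// package model in a temporary directory. Only the PackageModel.Name field
// used by WriteBase above is assumed; the target directory is illustrative.
func ExamplePackageModelWriteBase() {
	pkg := new(PackageModel)
	pkg.Name = "mymodel"

	if err := pkg.WriteBase(os.TempDir()); err != nil {
		fmt.Printf("write base failed: %s\n", err.Error())
		return
	}
	fmt.Println("base.go generated in", os.TempDir())
}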
func (d *DataGrabberController) Transform(dataGrabber *colonycore.DataGrabber) (bool, []toolkit.M, string) {
	logConf, err := d.getLogger(dataGrabber)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		defer logConf.Close()
	}

	message := fmt.Sprintf("===> Transformation started! %s -> %s interval %d %s",
		dataGrabber.DataSourceOrigin, dataGrabber.DataSourceDestination,
		dataGrabber.GrabInterval, dataGrabber.IntervalType)
	logConf.AddLog(message, "SUCCESS")
	fmt.Println(message)

	dsOrigin := new(colonycore.DataSource)
	err = colonycore.Get(dsOrigin, dataGrabber.DataSourceOrigin)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return false, nil, err.Error()
	}

	dsDestination := new(colonycore.DataSource)
	err = colonycore.Get(dsDestination, dataGrabber.DataSourceDestination)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return false, nil, err.Error()
	}

	dataDS, _, conn, query, metaSave, err := new(DataSourceController).
		ConnectToDataSource(dataGrabber.DataSourceOrigin)
	if len(dataDS.QueryInfo) == 0 {
		message := "Data source origin has invalid query"
		logConf.AddLog(message, "ERROR")
		return false, nil, message
	}
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return false, nil, err.Error()
	}
	defer conn.Close()

	if metaSave.keyword != "" {
		message := `Data source origin query is not "Select"`
		logConf.AddLog(message, "ERROR")
		return false, nil, message
	}

	cursor, err := query.Cursor(nil)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return false, nil, err.Error()
	}
	defer cursor.Close()

	data := []toolkit.M{}
	err = cursor.Fetch(&data, 0, false)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return false, nil, err.Error()
	}

	connDesc := new(colonycore.Connection)
	err = colonycore.Get(connDesc, dsDestination.ConnectionID)
	if err != nil {
		logConf.AddLog(err.Error(), "ERROR")
		return false, nil, err.Error()
	}

	const FLAG_ARG_DATA string = `%1`

	transformedData := []toolkit.M{}
	for _, each := range data {
		eachTransformedData := toolkit.M{}

		for _, eachMap := range dataGrabber.Maps {
			var valueEachSourceField interface{}

			// ============================================ SOURCE
			if !strings.Contains(eachMap.Source, "|") {
				// source could be: field, object, array
				valueEachSourceField = each.Get(eachMap.Source)
			} else {
				// source could be: field of object, field of array-objects
				prev := strings.Split(eachMap.Source, "|")[0]
				next := strings.Split(eachMap.Source, "|")[1]

				var fieldInfoDes *colonycore.FieldInfo = nil
				for _, eds := range dsOrigin.MetaData {
					if eds.ID == prev {
						fieldInfoDes = eds
						break
					}
				}

				if fieldInfoDes != nil {
					if fieldInfoDes.Type == "array-objects" {
						// source is field of array-objects
						valueObjects := []interface{}{}
						if temp, _ := each.Get(prev, nil).([]interface{}); temp != nil {
							valueObjects = make([]interface{}, len(temp))
							for i, each := range temp {
								if tempSub, _ := toolkit.ToM(each); tempSub != nil {
									valueObjects[i] = tempSub.Get(next)
								}
							}
						}
						valueEachSourceField = valueObjects
					} else {
						// source is field of object
						valueObject := toolkit.M{}
						if valueObject, _ = toolkit.ToM(each.Get(prev)); valueObject != nil {
							valueEachSourceField = valueObject.Get(next)
						}
					}
				}
			}

			// ============================================ DESTINATION
			if !strings.Contains(eachMap.Destination, "|") {
				if eachMap.SourceType == "object" {
					sourceObject, _ := toolkit.ToM(valueEachSourceField)
					if sourceObject == nil {
						sourceObject = toolkit.M{}
					}

					valueObject := toolkit.M{}
					for _, desMeta := range dsDestination.MetaData {
						if desMeta.ID == eachMap.Destination {
							for _, eachMetaSub := range desMeta.Sub {
								// valueObject.Set(eachMetaSub.ID, sourceObject.Get(eachMetaSub.ID))
								valueObject.Set(eachMetaSub.ID, d.convertTo(sourceObject.Get(eachMetaSub.ID), eachMap.DestinationType))
							}
							break
						}
					}
					eachTransformedData.Set(eachMap.Destination, valueObject)
				} else if eachMap.SourceType == "array-objects" {
					sourceObjects, _ := valueEachSourceField.([]interface{})
					if sourceObjects == nil {
						sourceObjects = []interface{}{}
					}

					valueObjects := []interface{}{}
					for _, sourceObjectRaw := range sourceObjects {
						sourceObject, _ := toolkit.ToM(sourceObjectRaw)
						if sourceObject == nil {
							sourceObject = toolkit.M{}
						}

						valueObject := toolkit.M{}
						for _, desMeta := range dsDestination.MetaData {
							if desMeta.ID == eachMap.Destination {
								for _, eachMetaSub := range desMeta.Sub {
									// valueObject.Set(eachMetaSub.ID, sourceObject.Get(eachMetaSub.ID))
									valueObject.Set(eachMetaSub.ID, d.convertTo(sourceObject.Get(eachMetaSub.ID), eachMap.DestinationType))
								}
								break
							}
						}
						valueObjects = append(valueObjects, valueObject)
					}
					eachTransformedData.Set(eachMap.Destination, valueObjects)
				} else {
					if strings.Contains(eachMap.DestinationType, "array") {
						valueObjects := each.Get(eachMap.Source)
						eachTransformedData.Set(eachMap.Destination, valueObjects)
					} else {
						// eachTransformedData.Set(eachMap.Destination, convertDataType(eachMap.DestinationType, eachMap.Source, each))
						eachTransformedData.Set(eachMap.Destination, d.convertTo(each.Get(eachMap.Source), eachMap.DestinationType))
					}
				}
			} else {
				prev := strings.Split(eachMap.Destination, "|")[0]
				next := strings.Split(eachMap.Destination, "|")[1]

				var fieldInfoDes *colonycore.FieldInfo = nil
				for _, eds := range dsDestination.MetaData {
					if eds.ID == prev {
						fieldInfoDes = eds
						break
					}
				}

				if fieldInfoDes != nil {
					if fieldInfoDes.Type == "array-objects" {
						valueObjects := []interface{}{}
						if temp := eachTransformedData.Get(prev, nil); temp == nil {
							valueObjects = []interface{}{}
						} else {
							valueObjects, _ = temp.([]interface{})
							if valueObjects == nil {
								valueObjects = []interface{}{}
							}
						}

						if temp, _ := valueEachSourceField.([]interface{}); temp != nil {
							for i, eachVal := range temp {
								valueObject := toolkit.M{}

								if len(valueObjects) > i {
									if temp2, _ := toolkit.ToM(valueObjects[i]); temp2 != nil {
										valueObject = temp2
										// valueObject.Set(next, eachVal)
										valueObject.Set(next, d.convertTo(eachVal, eachMap.DestinationType))
									}
									valueObjects[i] = valueObject
								} else {
									if fieldInfoDes.Sub != nil {
										for _, subMeta := range fieldInfoDes.Sub {
											valueObject.Set(subMeta.ID, nil)
										}
									}
									// valueObject.Set(next, eachVal)
									valueObject.Set(next, d.convertTo(eachVal, eachMap.DestinationType))
									valueObjects = append(valueObjects, valueObject)
								}
							}
						}
						eachTransformedData.Set(prev, valueObjects)
					} else {
						valueObject, _ := toolkit.ToM(eachTransformedData.Get(prev))
						if valueObject == nil {
							valueObject = toolkit.M{}
						}

						// additional: resolve the nested source value from its parent object
						prevSource := strings.Split(eachMap.Source, "|")[0]
						nextSource := strings.Split(eachMap.Source, "|")[1]
						mval, _ := toolkit.ToM(each.Get(prevSource, nil))
						if mval == nil {
							mval = toolkit.M{}
						}

						valueObject.Set(next, d.convertTo(mval.Get(nextSource), eachMap.DestinationType))
						// valueObject.Set(next, convertDataType(eachMap.DestinationType, nextSource, mval))
						eachTransformedData.Set(prev, valueObject)
					}
				}
			}
		}

		transformedData = append(transformedData, eachTransformedData)
		dataToSave := eachTransformedData

		// ================ pre transfer command
		if dataGrabber.PreTransferCommand != "" {
			// jsonTranformedDataBytes, err := json.Marshal(each)
			jsonTranformedDataBytes, err := json.Marshal(eachTransformedData)
			if err != nil {
				return false, nil, err.Error()
			}
			jsonTranformedData := string(jsonTranformedDataBytes)

			var preCommand = dataGrabber.PreTransferCommand
			if strings.Contains(dataGrabber.PreTransferCommand, FLAG_ARG_DATA) {
				preCommand = strings.TrimSpace(strings.Replace(dataGrabber.PreTransferCommand, FLAG_ARG_DATA, "", -1))
			}

			dataToSave = toolkit.M{}
			output, err := toolkit.RunCommand(preCommand, jsonTranformedData)
			fmt.Printf("===> Pre Transfer Command Result\n COMMAND -> %s %s\n OUTPUT -> %s\n", preCommand, jsonTranformedData, output)
			if err == nil {
				postData := toolkit.M{}
				if err := json.Unmarshal([]byte(output), &postData); err == nil {
					dataToSave = postData
				}
			}
		}
		// ================

		if len(dataToSave) == 0 {
			continue
		}

		nilFieldDest := eachTransformedData
		for _, metadataDest := range dsDestination.MetaData {
			if temp := eachTransformedData.Get(metadataDest.ID); temp == nil {
				if metadataDest.ID != "_id" {
					if metadataDest.Type == "object" {
						valueObject := toolkit.M{}
						for _, eachMetaSub := range metadataDest.Sub {
							valueObject.Set(eachMetaSub.ID, nil)
						}
						nilFieldDest.Set(metadataDest.ID, valueObject)
					} else if metadataDest.Type == "array-objects" {
						valueEachSourceField := each.Get(metadataDest.ID)
						sourceObjects, _ := valueEachSourceField.([]interface{})
						if sourceObjects == nil {
							sourceObjects = []interface{}{}
						}

						valueObjects := []interface{}{}
						for _, sourceObjectRaw := range sourceObjects {
							sourceObject, _ := toolkit.ToM(sourceObjectRaw)
							if sourceObject == nil {
								sourceObject = toolkit.M{}
							}

							valueObject := toolkit.M{}
							for keyss := range sourceObject {
								valueObject.Set(keyss, nil)
							}
							valueObjects = append(valueObjects, valueObject)
						}
						nilFieldDest.Set(metadataDest.ID, valueObjects)
					} else {
						if strings.Contains(metadataDest.Type, "array") {
							valueObjects := []interface{}{}
							nilFieldDest.Set(metadataDest.ID, valueObjects)
						} else {
							nilFieldDest.Set(metadataDest.ID, nil)
						}
					}
				}
			}
		}

		tableName := dsDestination.QueryInfo.GetString("from")
		queryWrapper := helper.Query(connDesc.Driver, connDesc.Host, connDesc.Database, connDesc.UserName, connDesc.Password, connDesc.Settings)
		if dataGrabber.InsertMode == "fresh" {
			queryWrapper.Delete(tableName, dbox.Or())
		}
		if eachTransformedData.Has("_id") {
			err = queryWrapper.Delete(tableName, dbox.Eq("_id", eachTransformedData.Get("_id")))
		}

		if toolkit.HasMember([]string{"json", "jsons", "csv", "csvs"}, connDesc.Driver) && strings.HasPrefix(connDesc.Host, "http") {
			queryWrapper = helper.Query(connDesc.Driver, connDesc.FileLocation, "", "", "", connDesc.Settings)
		} else {
			queryWrapper = helper.Query(connDesc.Driver, connDesc.Host, connDesc.Database, connDesc.UserName, connDesc.Password, connDesc.Settings)
		}

		if !nilFieldDest.Has("_id") || nilFieldDest.Get("_id") == nil || nilFieldDest.GetString("_id") == "<nil>" {
			nilFieldDest.Set("_id", helper.RandomIDWithPrefix(""))
		}

		err = queryWrapper.Save(tableName, nilFieldDest)
		if err != nil {
			logConf.AddLog(err.Error(), "ERROR")
			return false, nil, err.Error()
		}

		// ================ post transfer command
		if dataGrabber.PostTransferCommand != "" {
			eachTransformedData = dataToSave
			jsonTranformedDataBytes, err := json.Marshal(eachTransformedData)
			if err != nil {
				return false, nil, err.Error()
			}
			jsonTranformedData := string(jsonTranformedDataBytes)

			var postCommand = dataGrabber.PostTransferCommand
			if strings.Contains(dataGrabber.PostTransferCommand, FLAG_ARG_DATA) {
				postCommand = strings.TrimSpace(strings.Replace(dataGrabber.PostTransferCommand, FLAG_ARG_DATA, "", -1))
			}

			output, err := toolkit.RunCommand(postCommand, jsonTranformedData)
			fmt.Printf("===> Post Transfer Command Result\n COMMAND -> %s %s\n OUTPUT -> %s\n", postCommand, jsonTranformedData, output)
		}
	}

	message = fmt.Sprintf("===> Successfully transformed %v records", len(transformedData))
	logConf.AddLog(message, "SUCCESS")
	fmt.Println(message)

	return true, transformedData, ""
}
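// Sketch of an external pre/post transfer command (not part of the original
// source). Transform above passes each transformed record to the configured
// command as a single JSON-encoded argument via toolkit.RunCommand and, for
// the pre transfer command, saves whatever JSON the command prints to stdout.
// The standalone program below is a hypothetical example of such a command.
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	record := map[string]interface{}{}
	if len(os.Args) > 1 {
		// the record arrives as one JSON argument
		json.Unmarshal([]byte(os.Args[1]), &record)
	}

	// illustrative modification; a real command would apply its own rules
	record["processed"] = true

	// whatever is printed to stdout becomes the data that gets saved
	out, _ := json.Marshal(record)
	fmt.Println(string(out))
}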
func streamsavedata(intms <-chan toolkit.M, sQ dbox.IQuery, key string, dt toolkit.M) {
	var err error
	iN, note := 0, ""

	for intm := range intms {
		if destDboxs[key].desttype == "mongo" {
			intm.Set("_id", toolkit.GenerateRandomString("", 32))
		}

		if len(intm) == 0 {
			continue
		}

		// Pre Execute Program
		if extCommand.Has("pre") && toolkit.ToString(extCommand["pre"]) != "" {
			sintm := toolkit.JsonString(intm)

			arrcmd := make([]string, 0, 0)
			// if runtime.GOOS == "windows" {
			// 	arrcmd = append(arrcmd, "cmd")
			// 	arrcmd = append(arrcmd, "/C")
			// }
			arrcmd = append(arrcmd, toolkit.ToString(extCommand["pre"]))
			arrcmd = append(arrcmd, sintm)

			// output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1:])
			output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1])
			if err != nil {
				Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to execute pre external command :%s", key, err.Error()), "ERROR")
				note = "Error Found"
				continue
			}

			err = toolkit.UnjsonFromString(output, &intm)
			if err != nil {
				Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to get pre external command output :%s", key, err.Error()), "ERROR")
				note = "Error Found"
				continue
			}
		}

		err = sQ.Exec(toolkit.M{
			"data": intm,
		})
		if err != nil {
			Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to insert data [%s-%s]:%s", key, "csv", destDboxs[key].IConnection.Info().Host, err.Error()), "ERROR")
			note = "Error Found"
			continue
		}

		err = saverechistory(key, intm)
		if err != nil {
			Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to insert record data [%s-%s]:%s", key, "csv", destDboxs[key].IConnection.Info().Host, err.Error()), "ERROR")
			note = "Error Found"
		}

		iN += 1
		if math.Mod(float64(iN), 100) == 0 {
			_ = updatesnapshot(iN, key)
			dt = dt.Set("rowsaved", (toolkit.ToInt(dt.Get("rowsaved", 0), toolkit.RoundingAuto) + iN))
			iN = 0
		}

		// Post Execute Program
		if extCommand.Has("post") {
			sintm := toolkit.JsonString(intm)

			arrcmd := make([]string, 0, 0)
			// if runtime.GOOS == "windows" {
			// 	arrcmd = append(arrcmd, "cmd")
			// 	arrcmd = append(arrcmd, "/C")
			// }
			arrcmd = append(arrcmd, toolkit.ToString(extCommand["post"]))
			arrcmd = append(arrcmd, sintm)

			// output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1:])
			output, err := toolkit.RunCommand(arrcmd[0], arrcmd[1])
			if err != nil {
				Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to execute post external command :%s", key, err.Error()), "ERROR")
				note = "Error Found"
				continue
			}

			err = toolkit.UnjsonFromString(output, &intm)
			if err != nil {
				Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to get post external command output :%s", key, err.Error()), "ERROR")
				note = "Error Found"
				continue
			}
		}
	}

	dt = dt.Set("note", note).
		Set("grabstatus", "done").
		Set("rowsaved", (toolkit.ToInt(dt.Get("rowsaved", 0), toolkit.RoundingAuto) + iN))

	_ = updatesnapshot(iN, key)
	err = savehistory(dt)
	if err != nil {
		Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Unable to save history : %s", key, err.Error()), "ERROR")
	}

	Log.AddLog(fmt.Sprintf("[savedatagrab.%s] Finish save data", key), "INFO")
	destDboxs[key].IConnection.Close()
}
func (sm *StructModel) Write(pkg *PackageModel, path string) error {
	if pkg.Name == "" || sm.Name == "" {
		return toolkit.Errorf("Both package name and struct name should be defined")
	}
	//return toolkit.Errorf("Fail to write %s.%s : method Write is not yet implemented", pkg.Name, sm.Name)

	//-- write base
	e := pkg.WriteBase(path)
	if e != nil {
		return e
	}

	filename := filepath.Join(path, strings.ToLower(sm.Name)+".go")
	//currentCode := ""
	f, e := os.Open(filename)
	if e == nil {
		//bcurrent, _ := ioutil.ReadAll(f)
		//currentCode = string(bcurrent)
		f.Close()
		os.Remove(filename)
	}
	f, e = os.Create(filename)
	if e != nil {
		return toolkit.Errorf("Failed to write %s.%s: %s", pkg.Name, sm.Name, e.Error())
	}
	defer f.Close()

	txts := []string{}

	//--- package
	txts = append(txts, "package "+pkg.Name)

	//--- imports
	txts = append(txts, toolkit.Sprintf("import (%s)", libs(mandatoryLibs, pkg.ObjectLibs, sm.Libs)))

	//--- struct definition
	txts = append(txts, "type "+sm.Name+" struct {\n"+
		"orm.ModelBase `bson:\"-\" json:\"-\"`")
	for _, fld := range sm.Fields {
		if fld.Type == "" {
			fld.Type = "string"
		}
		fieldStr := toolkit.Sprintf("%s %s %s", fld.Name, fld.Type, fld.Tag)
		txts = append(txts, fieldStr)
	}
	txts = append(txts, "}")

	//--- tablename
	pluralNames := strings.ToLower(sm.Name)
	if strings.HasSuffix(pluralNames, "s") {
		pluralNames = pluralNames + "es"
	} else {
		pluralNames = pluralNames + "s"
	}
	tablename := toolkit.Sprintf("func (o *%s) TableName()string{"+
		"return \"%s\"\n"+
		"}", sm.Name, pluralNames)
	txts = append(txts, tablename)

	//--- new
	fieldBuilders := ""
	for _, field := range sm.Fields {
		notEmpty := !toolkit.IsNilOrEmpty(field.Default)
		if notEmpty {
			def := toolkit.Sprintf("%v", field.Default)
			if field.Type == "string" {
				def = "\"" + def + "\""
			}
			fieldBuilders += toolkit.Sprintf("o.%s=%s", field.Name, def) + "\n"
		}
	}
	newfn := "func New{0}() *{0}{\n" +
		"o:=new({0})\n" +
		fieldBuilders +
		"return o" +
		"}"
	newfn = toolkit.Formatf(newfn, sm.Name)
	txts = append(txts, newfn)

	//--- find
	tpl := `func {0}Find(filter *dbox.Filter, fields, orders string, limit, skip int) dbox.ICursor {
config := makeFindConfig(fields, orders, skip, limit)
if filter != nil {
config.Set("where", filter)
}
c, _ := DB().Find(new({0}), config)
return c
}`
	txts = append(txts, toolkit.Formatf(tpl, sm.Name))

	//--- get
	tpl = `func {0}Get(filter *dbox.Filter, orders string, skip int) (emp *{0}, err error) {
config := makeFindConfig("", orders, skip, 1)
if filter != nil {
config.Set("where", filter)
}
c, ecursor := DB().Find(new({0}), config)
if ecursor != nil {
return nil, ecursor
}
defer c.Close()
emp = new({0})
err = c.Fetch(emp, 1, false)
return emp, err
}`
	txts = append(txts, toolkit.Formatf(tpl, sm.Name))

	//-- method & get
	for _, method := range sm.Methods {
		txts = append(txts, sm.buildMethod(pkg, method.Type, method.Field))
	}

	b := bufio.NewWriter(f)
	for _, txt := range txts {
		b.WriteString(txt + "\n")
	}
	e = b.Flush()
	if e != nil {
		return toolkit.Errorf("Failed to write %s.%s: %s", pkg.Name, sm.Name, e.Error())
	}

	toolkit.RunCommand("/bin/sh", "-c", "gofmt -w "+filename)
	return nil
}
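// Usage sketch (not part of the original source): writes the generated code
// for a minimal struct model. Only the Name fields on PackageModel and
// StructModel used by Write above are assumed; the model name and target
// directory are illustrative, and sm.Fields is left for the caller to fill in
// with the field descriptors (Name, Type, Tag, Default) that Write expects.
func ExampleStructModelWrite() {
	pkg := new(PackageModel)
	pkg.Name = "inventory"

	sm := new(StructModel)
	sm.Name = "Item"
	// sm.Fields would normally be populated with field definitions before calling Write.

	if err := sm.Write(pkg, os.TempDir()); err != nil {
		fmt.Printf("write failed: %s\n", err.Error())
		return
	}
	fmt.Println("generated", strings.ToLower(sm.Name)+".go")
}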