func writeCsv(ch chan []string) { var writer *csv.Writer if outputFn == "" { writer = csv.NewWriter(os.Stdout) } else { file, err := os.Create(outputFn) if err != nil { fmt.Println("Error:", err) os.Exit(1) } defer file.Close() writer = csv.NewWriter(file) } r, _ := utf8.DecodeRuneInString(outputSep) writer.Comma = r for row := range ch { err := writer.Write(row) if err != nil { fmt.Println("Error:", err) close(ch) return } } writer.Flush() }
func main() { flag.Parse() if *showVersion { fmt.Printf("json2csv v1.1\n") return } var reader *bufio.Reader var writer *csv.Writer if *inputFile != "" { file, err := os.OpenFile(*inputFile, os.O_RDONLY, 0600) if err != nil { log.Printf("Error %s opening %v", err, *inputFile) return } reader = bufio.NewReader(file) } else { reader = bufio.NewReader(os.Stdin) } if *outputFile != "" { file, err := os.OpenFile(*outputFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600) if err != nil { log.Printf("Error %s opening outputFile %v", err, *outputFile) } writer = csv.NewWriter(file) } else { writer = csv.NewWriter(os.Stdout) } json2csv(reader, writer, keys) }
// zipOutput1 writes two CSV files for username — one listing the
// undesireable addresses grouped by category, one listing the whole list —
// and bundles both CSVs into username+"_.zip" on disk.
//
// NOTE(review): the responce parameter is unused; kept for caller
// compatibility — confirm it can be dropped.
func zipOutput1(username string, wholeList, responce map[string]string, undesireables []map[string]string) {
	undesireableNames := []string{"Unsubscribes", "Bounce", "Invalids", "Blocks", "Spam Reports"}
	wholelistname := username + "DONOTSEND.csv"
	donotsendname := username + "newlist.csv"

	outputFile, err := os.Create(wholelistname)
	if err != nil {
		log.Fatal(err) // BUG FIX: Create error was previously ignored
	}
	defer outputFile.Close()
	csvOutput := csv.NewWriter(outputFile)
	for i := 0; i < len(undesireables); i++ {
		// Guard: don't index past the fixed category-name list.
		if i < len(undesireableNames) {
			csvOutput.Write([]string{undesireableNames[i]})
		}
		for k := range undesireables[i] {
			csvOutput.Write([]string{k})
		}
		csvOutput.Write([]string{})
	}
	// Flush once after all rows rather than once per row.
	csvOutput.Flush()

	newlistOutputFile, err := os.Create(donotsendname)
	if err != nil {
		log.Fatal(err)
	}
	defer newlistOutputFile.Close()
	newlistCsvOutput := csv.NewWriter(newlistOutputFile)
	for k := range wholeList {
		newlistCsvOutput.Write([]string{k})
	}
	newlistCsvOutput.Flush()

	// Build the zip archive in memory.
	buf := new(bytes.Buffer)
	w := zip.NewWriter(buf)
	// BUG FIX: the zip entries previously contained placeholder description
	// strings (left over from the archive/zip example) instead of the CSV
	// data; embed the real file contents.
	for _, name := range []string{wholelistname, donotsendname} {
		body, err := ioutil.ReadFile(name)
		if err != nil {
			log.Fatal(err)
		}
		f, err := w.Create(name)
		if err != nil {
			log.Fatal(err)
		}
		if _, err := f.Write(body); err != nil {
			log.Fatal(err)
		}
	}
	// Make sure to check the error on Close.
	if err := w.Close(); err != nil {
		log.Fatal(err)
	}
	// Write the zipped archive to disk.
	if err := ioutil.WriteFile(username+"_.zip", buf.Bytes(), 0777); err != nil {
		log.Fatal(err)
	}
}
func processCompact(sess rets.Requester, ctx context.Context, req rets.SearchRequest, output *string) { w := csv.NewWriter(os.Stdout) if *output != "" { os.MkdirAll(*output, 0777) f, _ := os.Create(*output + "/results.csv") defer f.Close() w = csv.NewWriter(f) } defer w.Flush() // loop over all the pages we need for { fmt.Printf("Querying next page: %v\n", req) result, err := rets.SearchCompact(sess, ctx, req) if err != nil { panic(err) } switch result.Response.Code { case rets.StatusOK: // we got some daters case rets.StatusNoRecords: return case rets.StatusSearchError: fallthrough default: // shit hit the fan panic(errors.New(result.Response.Text)) } count := 0 if count == 0 { w.Write(result.Columns) } hasMoreRows, err := result.ForEach(func(row rets.Row, err error) error { if err != nil { return err } w.Write(row) count++ return err }) result.Close() if err != nil { panic(err) } if !hasMoreRows { return } if req.Offset == 0 { req.Offset = 1 } req.Offset = req.Offset + count } }
// DataWriter returns a csv.Writer targeting stdout when *output is the
// literal string "stdout"; otherwise it creates the file at *output and
// returns a writer on it. A file-creation failure is printed and then
// causes a panic.
func DataWriter(output *string) *csv.Writer {
	if *output == "stdout" {
		return csv.NewWriter(os.Stdout)
	}
	outfile, err := os.Create(*output)
	if err != nil {
		fmt.Println(err)
		panic(err)
	}
	return csv.NewWriter(outfile)
}
// RunCommitters lists stargazers by commits to subscribed repos, from // most prolific committer to least. func RunCommitters(c *fetch.Context, sg []*fetch.Stargazer, rs map[string]*fetch.Repo) error { log.Infof("running committers analysis") // Open file and prepare. f, err := createFile(c, "committers.csv") if err != nil { return util.Errorf("failed to create file: %s", err) } defer f.Close() w := csv.NewWriter(f) if err := w.Write([]string{"Login", "Email", "Commits", "Additions", "Deletions"}); err != nil { return util.Errorf("failed to write to CSV: %s", err) } // Sort the stargazers. slice := Contributors(sg) sort.Sort(slice) // Now accumulate by days. for _, s := range slice { c, a, d := s.TotalCommits() if c == 0 { break } if err := w.Write([]string{s.Login, s.Email, strconv.Itoa(c), strconv.Itoa(a), strconv.Itoa(d)}); err != nil { return util.Errorf("failed to write to CSV: %s", err) } } w.Flush() log.Infof("wrote committers analysis to %s", f.Name()) return nil }
// newDroid returns a droidWriter that emits DROID-style CSV records to w.
func newDroid(w io.Writer) *droidWriter {
	return &droidWriter{
		parents: make(map[string]parent),
		rec:     make([]string, 18),
		// BUG FIX: the w parameter was previously ignored and output always
		// went to os.Stdout.
		w: csv.NewWriter(w),
	}
}
func ExportHandler(w http.ResponseWriter, r *http.Request) { if r.Method == "POST" { language := r.FormValue("language") fmt.Println("Exporting in", language) translations := model.GetPreferredTranslations(language) w.Header().Set("Content-Encoding", "UTF-8") w.Header().Set("Content-Type", "application/csv; charset=UTF-8") w.Header().Set("Content-Disposition", "attachment; filename=\""+model.LanguageNames[language]+".csv\"") out := csv.NewWriter(w) out.Write([]string{ "Original", "Part of", "Translation", }) for _, translation := range translations { out.Write([]string{ translation.Entry.Original, translation.Entry.PartOf, translation.Translation, }) } out.Flush() return } else { renderTemplate("export", w, r, func(data TemplateData) TemplateData { return data }) } }
// New writes all of the Rows in a Table to a CSV file. func New(source optimus.Table, filename string) error { fout, err := os.Create(filename) defer fout.Close() if err != nil { return err } writer := csv.NewWriter(fout) headers := []string{} wroteHeader := false for row := range source.Rows() { if !wroteHeader { headers = convertRowToHeader(row) if err := writer.Write(headers); err != nil { return err } wroteHeader = true } if err := writer.Write(convertRowToRecord(row, headers)); err != nil { return err } } if source.Err() != nil { return source.Err() } writer.Flush() if writer.Error() != nil { return writer.Error() } return nil }
// Scrap events for the specified lowerEventID (inclusive) to // upperEventID (exclusive). Writes scraped events to the specified fileName as CSV. func scrapEvents(lowerEventID int, upperEventID int, fileName string) { delta := upperEventID - lowerEventID if delta < 0 { panic("lowerEventID (" + string(lowerEventID) + ") is less than upperEventID (" + string(upperEventID) + ")") } csvFile, err := os.Create(fileName) if err != nil { panic(err) } defer csvFile.Close() writer := csv.NewWriter(csvFile) writtenEvents := 0 for i := 0; i < delta; i++ { scrapedEvent := scrapEvent(lowerEventID + i) if scrapedEvent != nil { writer.Write(scrapedEvent) writtenEvents++ } } writer.Flush() fmt.Println("number of scraped events written: ", writtenEvents) }
func prepareCSV(ctx *Context) error { // Collect all the data we need from the database. log.Println("Retrieving records...") rows, err := ctx.In.Connection.Query("SELECT token, vendor, app_id, language, user_info FROM devices") if err != nil { return err } defer rows.Close() // Open the output file. log.Printf("Opening %s for output...", ctx.Out.CSVFile) file, err := os.OpenFile(ctx.Out.CSVFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666) if err != nil { return err } // Create a CSV writer and dump the records in it. writer := csv.NewWriter(file) log.Println("Exporting records...") for rows.Next() { d := NewDevice() rows.Scan(&d.Token, &d.Vendor, &d.AppID, &d.Language, &d.UserInfo) if err := writer.Write(ctx.Exporter.ToCSV(d)); err != nil { return err } } writer.Flush() return nil }
// Analyze all significant dates with a histogram. But also write any significant data // to a csv file so we can do further processing in R func AnalyzeAllResponses(a *Analyzer, ar []*Response) { f, err := os.Create("swing-data.csv") if err != nil { panic(err) } defer f.Close() w := csv.NewWriter(f) defer w.Flush() Range := histogram.Range(-1.0, 200, .01) h, err := histogram.NewHistogram(Range) if err != nil { panic(err) } for _, resp := range ar { for _, oi := range a.AnalyzeStock(resp) { var toWrite = []string{ strconv.FormatFloat(oi.Swing, 'f', 4, 64), strconv.FormatFloat(oi.Ret, 'f', 4, 64), } w.Write(toWrite) h.Add(oi.Ret) } } fmt.Println("MEAN: ", h.Mean()) fmt.Println("SIGMA ", h.Sigma()) }
func SaveDebtData(l []DebtItem, filename string) error { csvfile, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE, 0660) if err != nil { if _, err := os.Stat(filename); err != nil { csvfile, _ = os.Create(filename) } else { fmt.Printf("Error opening debt file: %v", err) panic(err) } } defer csvfile.Close() writer := csv.NewWriter(csvfile) for _, item := range l { err = writer.Write([]string{item.Debtor.Name, item.Creditor.Name, fmt.Sprintf("%d", item.Amount.Cents), item.Note, strconv.Itoa(int(item.Creation.Unix()))}) if err != nil { fmt.Println(err) } } err = writer.Error() if err != nil { fmt.Println(err) } writer.Flush() return nil }
// main clusters a newick tree using pairwise alignment distances and writes
// the clusters as CSV. Modes:
//   - with -gobit set: read alignment distances from the astral directory in
//     arg 0 and GOB-encode them to the given path, then exit;
//   - otherwise: load distances (from a directory of alignments, or from a
//     GOB file), read the newick tree in arg 1, cluster at flagThreshold,
//     and write CSV to the path in arg 2.
func main() {
	// Optional CPU profiling, enabled by the cpuprof flag.
	if len(util.FlagCpuProf) > 0 {
		f := util.CreateFile(util.FlagCpuProf)
		pprof.StartCPUProfile(f)
		defer f.Close()
		defer pprof.StopCPUProfile()
	}
	// GOB-encode mode: persist the computed distances and stop.
	if len(flagGobIt) > 0 {
		astralDir := util.Arg(0)
		dists := readAlignmentDists(astralDir)
		enc := gob.NewEncoder(util.CreateFile(flagGobIt))
		util.Assert(enc.Encode(dists), "Could not GOB encode distances")
		return
	}
	var dists *intern.Table
	// Arg 0 is either a directory of alignments or a GOB file of distances.
	if util.IsDir(util.Arg(0)) {
		dists = readAlignmentDists(util.Arg(0))
	} else {
		dec := gob.NewDecoder(util.OpenFile(util.Arg(0)))
		util.Assert(dec.Decode(&dists), "Could not GOB decode distances")
	}
	treeFile := util.Arg(1)
	outPath := util.Arg(2)
	treeReader := newick.NewReader(util.OpenFile(treeFile))
	tree, err := treeReader.ReadTree()
	util.Assert(err, "Could not read newick tree")
	csvw := csv.NewWriter(util.CreateFile(outPath))
	clusters := treeClusters(flagThreshold, dists, tree)
	// NOTE(review): csv.Writer.WriteAll flushes internally, so no explicit
	// Flush is needed here; this Assert call passes no message, unlike the
	// other call sites — confirm util.Assert accepts a lone error argument.
	util.Assert(csvw.WriteAll(clusters))
}
func main() { app := initApp() app.Action = func(c *cli.Context) { // Validate arguments opts, err := validateArgs(c) if err != nil { log.WithFields(log.Fields{ "message": err, }).Fatal("invalid arguments") } // Parse input, send, and receieve results header, results, err := sendAndReceive(opts.Input) if err != nil { log.WithFields(log.Fields{ "message": err, }).Fatal("error during send and receive") } // Check for empty results (not probable) if len(results) < 1 { log.Info("No results!") return } // Sort and output results sort.Sort(ByASNumber(results)) // Write to CSV if neecessary if opts.Output != "" { f, err := os.Create(opts.Output) if err != nil { log.Fatal(err) } defer f.Close() w := csv.NewWriter(f) header := []string{"AS", "IP", "BGP Prefix", "CC", "Registry", "Allocated", "AS Name"} err = w.Write(header) if err != nil { log.Fatal(err) } for _, res := range results { w.Write(res.CSVRecord()) } w.Flush() log.WithFields(log.Fields{ "path": opts.Output, }).Info("results saved to csv") } else { fmt.Printf(header) for _, res := range results { fmt.Printf("%s\n", res.String()) } } } app.Run(os.Args) }
func LiveExportHandler(w http.ResponseWriter, r *http.Request) { if r.Method == "POST" { fmt.Println("Exporting live translations") translations := model.GetLiveTranslations() w.Header().Set("Content-Encoding", "UTF-8") w.Header().Set("Content-Type", "application/csv; charset=UTF-8") w.Header().Set("Content-Disposition", "attachment; filename=\"live.csv\"") out := csv.NewWriter(w) out.Write([]string{ "Original", "Part of", "Language", "Translation", }) for _, translation := range translations { for _, part := range translation.Parts { out.Write([]string{ part.Entry.Original, part.Entry.PartOf, translation.Language, part.Translation, }) } } out.Flush() return } }
func (q *Query) startWriteMode() error { var e error ci := q.Connection().(*Connection).Info() if q.newFileWrite { q.tempfile, e = os.OpenFile(q.filePath+".temp", os.O_CREATE, 0666) } else { q.tempfile, e = os.OpenFile(q.filePath, os.O_RDWR|os.O_APPEND, 0666) } if e != nil { return e } q.writer = csv.NewWriter(q.tempfile) if delimiter := ci.Settings.Get("delimiter", "").(string); delimiter != "" { q.writer.Comma = rune(delimiter[0]) } if q.newFileWrite && q.isUseHeader && len(q.headerColumn) > 0 { datatemp := make([]string, 0) for _, v := range q.headerColumn { datatemp = append(datatemp, v.name) } q.writer.Write(datatemp) q.writer.Flush() } return nil }
// NewCSVExportOutput returns a CSVExportOutput configured to write output to the // given io.Writer, extracting the specified fields only. func NewCSVExportOutput(fields []string, out io.Writer) *CSVExportOutput { return &CSVExportOutput{ fields, 0, csv.NewWriter(out), } }
func (res *Result) CSV() []byte { buff := &bytes.Buffer{} writer := csv.NewWriter(buff) writer.Write(res.Columns) for _, row := range res.Rows { record := make([]string, len(res.Columns)) for i, item := range row { if item != nil { record[i] = fmt.Sprintf("%v", item) } else { record[i] = "" } } err := writer.Write(record) if err != nil { fmt.Println(err) break } } writer.Flush() return buff.Bytes() }
// Run prints the first data record's first field ("Rob") and then emits a
// small fixed table of users to stdout as colon-separated CSV, aborting the
// process on any write or flush failure.
func Run() {
	records := [][]string{
		{"first name", "last name", "username"},
		{"Rob", "Pike", "rob"},
		{"Ken", "Thompson", "ken"},
		{"Robert", "Griesemer", "gri"},
	}
	fmt.Println(records[1][0])

	writer := csv.NewWriter(os.Stdout)
	writer.Comma = ':'
	for _, rec := range records {
		if err := writer.Write(rec); err != nil {
			log.Fatalln("error while writing record to csv", err)
		}
	}
	writer.Flush()
	if err := writer.Error(); err != nil {
		log.Fatal(err)
	}
}
// save serializes the page rows l to CSV and writes them to a versioned
// partition file named "<i>.<version>". The version number is rotated
// modulo 8 and persisted via SaveDPs BEFORE the data write, so readers can
// discover the new version name.
func (dir *Directory) save(i int64, l [][]string) error {
	// Render the rows into an in-memory buffer first.
	// NOTE(review): WriteAll/Flush errors are unchecked; writes to a
	// bytes.Buffer cannot fail, so this appears safe — confirm.
	b := new(bytes.Buffer)
	w := csv.NewWriter(b)
	w.WriteAll(l)
	w.Flush() // done!
	part := strconv.FormatInt(i, 10) + "."
	// Load existing page metadata; fall back to a fresh DirPage when the
	// partition does not exist yet.
	p, e := LoadDPs(dir.R, part)
	if e != nil {
		p = new(DirPage)
		e = nil
	}
	p.Version = (p.Version + 1) % 8
	e = SaveDPs(dir.R, p, part)
	if e != nil {
		return e
	}
	// Full file name: "<i>.<version>".
	pver := part + strconv.FormatInt(p.Version, 10)
	f, e := dir.R.Create(pver)
	if e != nil {
		return e
	}
	defer f.Close()
	// Size the file exactly to the payload before writing at offset 0.
	e = f.Truncate(int64(b.Len()))
	if e != nil {
		return e
	}
	_, e = f.WriteAt(b.Bytes(), 0)
	return e
}
// Filter streams records of the given measure from fileName, feeds them
// through a Filter configured with the requested grouping interval/type, and
// writes the aggregated result as CSV to stdout using the Merki delimiter.
func (m *Merki) Filter(fileName, measure string, gi GroupingInterval, gt GroupingType) error {
	w := csv.NewWriter(os.Stdout)
	w.Comma = m.delimiter
	filter := NewFilter(w, measure, gi, gt)
	parser := NewParser(string(m.delimiter))
	// The parser runs concurrently, delivering parsed rows, a terminal
	// error, or completion over its three channels.
	go parser.ParseFile(fileName)
	// Consume parser output until it finishes or either side errors.
	err := func() error {
		for {
			select {
			case record := <-parser.Record:
				if err := filter.Add(record); err != nil {
					return err
				}
			case err := <-parser.Error:
				return err
			case <-parser.Done:
				return nil
			}
		}
	}()
	if err != nil {
		// NOTE(review): returning here may leave the parser goroutine
		// blocked sending on its Record channel — confirm ParseFile aborts
		// on its own when the consumer stops.
		return err
	}
	// Emit the aggregated rows, then flush the CSV buffer to stdout.
	err = filter.Print()
	if err != nil {
		return err
	}
	w.Flush()
	if err := w.Error(); err != nil {
		return err
	}
	return nil
}
// Save [...] func (g *Graph) Save(csvFile io.Writer) error { startT := time.Now() defer func() { log.Info("Graph.Save", time.Since(startT)) }() csvWriter := csv.NewWriter(csvFile) triples, err := g.Triples(SPEMPTY, SPEMPTY, nil, nil) if err != nil { log.Error(err) return err } for _, triple := range triples { sub, pred, err := SubPred(triple[0], triple[1]) if err == nil { switch triple[2].(type) { case string: csvWriter.Write([]string{sub, pred, fmt.Sprintf("%s", triple[2])}) case float64, float32: csvWriter.Write([]string{sub, pred, fmt.Sprintf("%f", triple[2])}) case int, uint, uint32, uint64: csvWriter.Write([]string{sub, pred, fmt.Sprintf("%d", triple[2])}) } } } csvWriter.Flush() return nil }
// dumpInCSVFormat renders fields as a CSV header row followed by rows, with
// embedded newlines and carriage returns escaped as literal "\n"/"\r" so
// every record stays on one physical line. The caller's slices are left
// unmodified.
func dumpInCSVFormat(fields []string, rows [][]string) string {
	var buf bytes.Buffer
	writer := csv.NewWriter(&buf)

	escape := func(s string) string {
		s = strings.Replace(s, "\n", "\\n", -1)
		return strings.Replace(s, "\r", "\\r", -1)
	}

	if len(fields) > 0 {
		// BUG FIX: the header was previously escaped by mutating the
		// caller's slice in place, and "\r" was never handled for fields
		// (inconsistent with rows). Escape into a fresh slice instead.
		header := make([]string, len(fields))
		for i, field := range fields {
			header[i] = escape(field)
		}
		writer.Write(header)
	}
	for _, row := range rows {
		// BUG FIX: rows were also escaped in place, mutating caller data.
		record := make([]string, len(row))
		for i, field := range row {
			record[i] = escape(field)
		}
		writer.Write(record)
	}
	writer.Flush()
	// Renamed the result variable: `csv` shadowed the encoding/csv package.
	return buf.String()
}
func newCSV(w io.Writer) *csvWriter { l := 11 if *hashf != "" { l = 12 } return &csvWriter{make([]string, l), csv.NewWriter(os.Stdout)} }
func (coarsedb *CoarseDB) saveSeedsPlain() error { Vprintf("Writing %s...\n", FileCoarsePlainSeeds) timer := time.Now() csvWriter := csv.NewWriter(coarsedb.plainSeeds) record := make([]string, 0, 10) for i := 0; i < coarsedb.Seeds.powers[coarsedb.Seeds.SeedSize]; i++ { if coarsedb.Seeds.Locs[i] == nil { continue } record = record[:0] record = append(record, string(coarsedb.Seeds.unhashKmer(i))) for loc := coarsedb.Seeds.Locs[i]; loc != nil; loc = loc.Next { record = append(record, fmt.Sprintf("%d", loc.SeqInd), fmt.Sprintf("%d", loc.ResInd)) } if err := csvWriter.Write(record); err != nil { return err } } csvWriter.Flush() Vprintf("Done writing %s (%s).\n", FileCoarsePlainSeeds, time.Since(timer)) return nil }
func main() { session, err := mgo.Dial(mongoURI()) if err != nil { log.Fatal(err) } defer session.Close() systemsC := session.DB("").C("systems") flag.Parse() ctx := context.Background() kami.Context = ctx kami.Use("/api/", httpauth.SimpleBasicAuth(os.Getenv("BASIC_USERNAME"), os.Getenv("BASIC_PASSWORD"))) kami.Post("/api/v1/systems", func(ctx context.Context, w http.ResponseWriter, r *http.Request) { system := alpha.System{} err := json.NewDecoder(r.Body).Decode(&system) if err != nil { http.Error(w, err.Error(), 400) } _, err = systemsC.UpsertId(system.ID, &system) if err != nil { http.Error(w, err.Error(), 400) } log.Printf("System %+v recieved", system) fmt.Fprint(w, "OK") }) kami.Get("/api/v1/systems.csv", func(ctx context.Context, w http.ResponseWriter, r *http.Request) { systems := []alpha.System{} err := systemsC.Find(nil).All(&systems) if err != nil { log.Println(err.Error()) http.Error(w, err.Error(), 400) } csvWriter := csv.NewWriter(w) csvWriter.Write([]string{"ID", "Model", "Serial Number", "Hostname", "OS Name", "OS Version", "Memory Total", "Memory Speed", "Disk Type", "Battery Condition", "Battery Charge Cycles", "Users"}) for _, s := range systems { users := []string{} for _, user := range s.Users { users = append(users, user.Name) } csvWriter.Write([]string{s.ID, s.Model, s.SerialNumber, s.Hostname, s.OS.Name, s.OS.Version, s.Memory.Total, s.Memory.Speed, s.Storage.Devices[0].Type, s.Battery.Condition, s.Battery.CycleCount, strings.Join(users, " | ")}) } w.Header().Set("Content-Type", "text/csv") w.Header().Set("Content-Disposition", "attachment;filename=systems.csv") csvWriter.Flush() }) kami.Serve() }
func WriteCSV(writer http.ResponseWriter, result []bson.M) { data := make([][]string, 0) headers := make([]string, 0) for key, _ := range result[0] { headers = append(headers, key) } data = append(data, headers) // each map[string]interface{} in result for _, item := range result { row := make([]string, 0) // each key/value pair in item data = append(data, row) for _, value := range item { if point, ok := value.([]interface{}); ok { row = append(row, point[0].(string)) } else if point, ok := value.(string); ok { row = append(row, point) } else if point, ok := value.(bson.ObjectId); ok { row = append(row, point.String()) } } data = append(data, row) } csvWriter := csv.NewWriter(writer) err := csvWriter.WriteAll(data) if err != nil { fmt.Fprintf(writer, err.Error()) } }
func NewDictWriter(fh io.Writer, fieldnames []string) (*DictWriter, error) { writer := gocsv.NewWriter(fh) dw := DictWriter{Writer: writer, Fieldnames: fieldnames} return &dw, nil }
// FileTokenStorage.StoreToken saves a token as a csv file func (store *FileTokenStorage) StoreToken(token *oauth2.Token) error { if store.StoragePath == "" { return errors.New("Cannot store token: StoragePath not set.") } tokenIdStr := fmt.Sprint(store.TokenId) if store.TokenId == nil || tokenIdStr == "" { return errors.New("Cannot store token: TokenId not set.") } filename := path.Join(store.StoragePath, tokenIdStr+".csv") f, err := os.Create(filename) if err != nil { return err } record := []string{token.AccessToken, token.RefreshToken, token.Expiry.String(), token.TokenType} w := csv.NewWriter(f) if err = w.Write(record); err != nil { return err } w.Flush() if err := w.Error(); err != nil { return err } return nil }