func main() {
	filecsv := `C:\Users\yejianfeng\Desktop\mxm\skill.csv`
	file, err := os.Open(filecsv)
	if err != nil {
		panic(err)
	}
	defer file.Close()

	reader := csv.NewReader(file)
	roleFolder := `C:\Users\yejianfeng\Desktop\mxm\skill\`

	// Skip the header row.
	if _, err := reader.Read(); err != nil {
		panic(err)
	}

	for {
		fields, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}

		// Column 15 holds the picture name; print column 0 for rows whose
		// image is missing in both .jpg and .png form.
		picurl := fields[15]
		if _, err := os.Stat(roleFolder + picurl + ".jpg"); err == nil {
			continue
		}
		if _, err := os.Stat(roleFolder + picurl + ".png"); err == nil {
			continue
		}
		fmt.Println(fields[0])
	}
}
func loadCsvFile(name, urlpath string, timestamp time.Time, force bool) (*csv.Reader, error) {
	filename := filepath.Join(csvDir, name+timestamp.Format(".2006-01-02")+".csv")

	if !force {
		file, err := os.Open(filename)
		if err == nil {
			return csv.NewReader(file), nil
		}
		if !os.IsNotExist(err) {
			return nil, err
		}
	}

	log.Info("Downloading %s.csv from Google Spreadsheets", name)
	resp, err := http.Get(csvUrlPrefix + urlpath + csvUrlSuffix)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if err := ioutil.WriteFile(filename, body, 0644); err != nil {
		return nil, err
	}
	return csv.NewReader(bytes.NewBuffer(body)), nil
}
// Get returns the CSV data as a two-dimensional string slice.
func (d *Data) Get() ([][]string, error) {
	if len(d.UnixMapData[d.Date.Unix()]) == 0 {
		data, err := hCache.Get(d.URL(), true)
		if err != nil {
			return nil, fmt.Errorf(errorNetworkFail.Error(), err)
		}

		csvArrayContent := strings.Split(string(data), "\n")
		for i := range csvArrayContent {
			csvArrayContent[i] = strings.TrimSpace(csvArrayContent[i])
		}

		var csvReader *csv.Reader
		if (d.exchange == "tse" && len(csvArrayContent) > 2) ||
			(d.exchange == "otc" && len(csvArrayContent) > 5) {
			if d.exchange == "tse" {
				if d.Name == "" {
					d.Name = strings.Split(csvArrayContent[0], " ")[2]
				}
				csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[2:], "\n")))
			} else if d.exchange == "otc" {
				if d.Name == "" {
					d.Name = strings.Split(csvArrayContent[2], ":")[1]
				}
				csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[5:len(csvArrayContent)-1], "\n")))
			}

			allData, err := csvReader.ReadAll()
			d.RawData = append(allData, d.RawData...)
			d.UnixMapData[d.Date.Unix()] = allData
			d.clearCache()
			return allData, err
		}
		return nil, errorNotEnoughData
	}
	return d.UnixMapData[d.Date.Unix()], nil
}
func parseFlags() (*csv.Reader, *csv.Reader, error) {
	flag.Parse()
	if *dataFile == "" {
		return nil, nil, fmt.Errorf("probs flag required")
	}
	if *remFile == "" {
		return nil, nil, fmt.Errorf("remaining flag required")
	}
	if *weekNumber < 1 || *weekNumber > 13 {
		return nil, nil, fmt.Errorf("week number must be specified and must be in the range [1,13]")
	}

	csvFile, err := os.Open(*dataFile)
	if err != nil {
		return nil, nil, err
	}
	pReader := csv.NewReader(csvFile)

	csvFile2, err := os.Open(*remFile)
	if err != nil {
		return pReader, nil, err
	}
	rReader := csv.NewReader(csvFile2)

	return pReader, rReader, nil
}
func Example() {
	// testFile is a CSV file with CR line endings.
	testFile := bytes.NewBufferString("a,b,c\r1,2,3\r").Bytes()

	// First try reading the csv file the normal way.
	// The CSV reader doesn't recognize the '\r' line ending.
	r1 := csv.NewReader(bytes.NewReader(testFile))
	lines1, err := r1.ReadAll()
	if err != nil {
		fmt.Println(err)
	}
	fmt.Printf("Without macreader: %#v\n", lines1)

	// Now try reading the csv file using macreader.
	// It should work as expected.
	r2 := csv.NewReader(New(bytes.NewReader(testFile)))
	lines2, err := r2.ReadAll()
	if err != nil {
		fmt.Println(err)
	}
	fmt.Printf("With macreader: %#v\n", lines2)

	// Output: Without macreader: [][]string{[]string{"a", "b", "c\r1", "2", "3"}}
	// With macreader: [][]string{[]string{"a", "b", "c"}, []string{"1", "2", "3"}}
}
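The Example above relies on a New wrapper from the macreader package, which is not shown here. The real implementation may differ (for instance, it may leave \r\n pairs intact); a minimal sketch of the idea, rewriting bare carriage returns to newlines so encoding/csv sees conventional line endings (needs the "io" import), could look like this:

// crReader and New are an illustrative sketch, not the real macreader API.
type crReader struct {
	r io.Reader
}

func New(r io.Reader) io.Reader {
	return crReader{r: r}
}

func (c crReader) Read(p []byte) (int, error) {
	n, err := c.r.Read(p)
	// Rewrite classic-Mac '\r' line endings to '\n' in place.
	for i := 0; i < n; i++ {
		if p[i] == '\r' {
			p[i] = '\n'
		}
	}
	return n, err
}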
func readCsv(ch chan []string) {
	var reader *csv.Reader
	if inputFn == "" {
		reader = csv.NewReader(os.Stdin)
	} else {
		file, err := os.Open(inputFn)
		if err != nil {
			fmt.Println("Error:", err)
			os.Exit(1)
		}
		defer file.Close()
		reader = csv.NewReader(file)
	}

	if !strictLen {
		reader.FieldsPerRecord = -1
	}
	r, _ := utf8.DecodeRuneInString(inputSep)
	reader.Comma = r
	reader.LazyQuotes = lazyQuotes

	for {
		record, err := reader.Read()
		if err == io.EOF {
			close(ch)
			break
		} else if err != nil {
			fmt.Println("Error:", err)
			close(ch)
			break
		}
		ch <- record
	}
}
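A hedged sketch of how readCsv might be driven from a caller; consumeRecords is a made-up name, and the package-level settings (inputFn, inputSep, strictLen, lazyQuotes) are assumed to be configured elsewhere. Because readCsv closes the channel on EOF or error, ranging over it terminates cleanly.

// consumeRecords is illustrative only.
func consumeRecords() {
	ch := make(chan []string)
	go readCsv(ch)
	for record := range ch {
		fmt.Println(record) // each record is one parsed CSV row
	}
}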
// ReadLines reads a CSV file and returns its rows; when isGbk is true the
// file is decoded from GBK to UTF-8 first.
func ReadLines(file string, isGbk bool) (lines [][]string, err error) {
	// Catch panics and turn them into an error.
	defer func() {
		if rerr := recover(); rerr != nil {
			err = fmt.Errorf("read csv file: %v, error: %v", file, rerr)
		}
	}()

	// Open the file.
	fi, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer fi.Close()

	// Build the reader, transcoding GBK to UTF-8 when requested.
	var reader *csv.Reader
	if !isGbk {
		reader = csv.NewReader(fi)
	} else {
		r := transform.NewReader(fi, simplifiedchinese.GBK.NewDecoder())
		reader = csv.NewReader(r)
	}

	lines, err = reader.ReadAll()
	return
}
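A usage sketch for ReadLines; the file path, the helper name, and the focus on the first column are illustrative assumptions only.

func printFirstColumn() error {
	// "data/names.csv" is a hypothetical path; pass true instead of false
	// for a GBK-encoded file.
	rows, err := ReadLines("data/names.csv", false)
	if err != nil {
		return err
	}
	for _, row := range rows {
		if len(row) > 0 {
			fmt.Println(row[0])
		}
	}
	return nil
}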
func TestWriteCSV(t *testing.T) {
	testutil.VerifyTestType(t, testutil.UnitTestType)

	Convey("With a CSV export output", t, func() {
		fields := []string{"_id", "x", " y", "z.1.a"}
		out := &bytes.Buffer{}

		Convey("Headers should be written correctly", func() {
			csvExporter := NewCSVExportOutput(fields, false, out)
			err := csvExporter.WriteHeader()
			So(err, ShouldBeNil)
			csvExporter.ExportDocument(bson.D{{"_id", "12345"}})
			csvExporter.WriteFooter()
			csvExporter.Flush()
			rec, err := csv.NewReader(strings.NewReader(out.String())).Read()
			So(err, ShouldBeNil)
			So(rec, ShouldResemble, []string{"_id", "x", " y", "z.1.a"})
		})

		Convey("Headers should not be written", func() {
			csvExporter := NewCSVExportOutput(fields, true, out)
			err := csvExporter.WriteHeader()
			So(err, ShouldBeNil)
			csvExporter.ExportDocument(bson.D{{"_id", "12345"}})
			csvExporter.WriteFooter()
			csvExporter.Flush()
			rec, err := csv.NewReader(strings.NewReader(out.String())).Read()
			So(err, ShouldBeNil)
			So(rec, ShouldResemble, []string{"12345", "", "", ""})
		})

		Convey("Exported document with missing fields should print as blank", func() {
			csvExporter := NewCSVExportOutput(fields, true, out)
			csvExporter.ExportDocument(bson.D{{"_id", "12345"}})
			csvExporter.WriteFooter()
			csvExporter.Flush()
			rec, err := csv.NewReader(strings.NewReader(out.String())).Read()
			So(err, ShouldBeNil)
			So(rec, ShouldResemble, []string{"12345", "", "", ""})
		})

		Convey("Exported document with index into nested objects should print correctly", func() {
			csvExporter := NewCSVExportOutput(fields, true, out)
			z := []interface{}{"x", bson.D{{"a", "T"}, {"B", 1}}}
			csvExporter.ExportDocument(bson.D{{Name: "z", Value: z}})
			csvExporter.WriteFooter()
			csvExporter.Flush()
			rec, err := csv.NewReader(strings.NewReader(out.String())).Read()
			So(err, ShouldBeNil)
			So(rec, ShouldResemble, []string{"", "", "", "T"})
		})

		Reset(func() {
			out.Reset()
		})
	})
}
// Get fetches the TWSE CSV data for the given category.
func (l *Lists) Get(category string) ([][]string, error) {
	if TWSECLASS[category] == "" {
		return nil, errorNotSupport
	}

	year, month, day := l.Date.Date()
	data, err := hCache.PostForm(fmt.Sprintf("%s%s", utils.TWSEHOST, utils.TWSELISTCSV),
		url.Values{
			"download":   {"csv"},
			"selectType": {category},
			"qdate":      {fmt.Sprintf("%d/%02d/%02d", year-1911, month, day)},
		})
	if err != nil {
		return nil, fmt.Errorf(errorNetworkFail.Error(), err)
	}

	csvArrayContent := strings.Split(string(data), "\n")
	var csvReader *csv.Reader
	switch category {
	case "MS":
		if len(csvArrayContent) > 6 {
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:51], "\n")))
		}
	case "ALLBUT0999", "ALL":
		if len(csvArrayContent) > 155 {
			re := regexp.MustCompile("^=?[\"]{1}[0-9A-Z]{4,}")
			var pickdata []string
			for _, v := range csvArrayContent {
				if re.MatchString(v) {
					// Strip the leading '=' that Excel-style exports prepend to quoted cells.
					if v[0] == '=' {
						pickdata = append(pickdata, v[1:])
					} else {
						pickdata = append(pickdata, v)
					}
				}
			}
			csvReader = csv.NewReader(strings.NewReader(strings.Join(pickdata, "\n")))
		}
	default:
		if len(csvArrayContent) > 9 {
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:len(csvArrayContent)-7], "\n")))
		}
	}

	if csvReader != nil {
		returnData, err := csvReader.ReadAll()
		switch category {
		case "MS":
		default:
			if err == nil {
				l.categoryRawData[category] = returnData
				l.formatData(category)
			}
		}
		return returnData, err
	}
	return nil, errorNotEnoughData
}
func main() {
	ht := makeSimpleHashTable(16, false)
	// ht := makeBackgroundHashTable(16)
	// ht := makeMasterHashTable(16)

	// Insertion
	insert_file, _ := os.Open("tests/insert.csv")
	defer insert_file.Close()
	insert_reader := csv.NewReader(insert_file)
	insert_times := make([]time.Duration, 0)
	for i := 0; ; i++ {
		data, err := insert_reader.Read()
		if err != nil {
			break
		}
		key, _ := strconv.ParseInt(data[1], 0, 0)
		val := data[2]
		start := time.Now()
		ht.insert(int(key), val)
		insert_times = Extend(insert_times, time.Since(start))
	}
	insert_out, _ := os.Create("data/simple4.csv")
	defer insert_out.Close()
	for _, datum := range insert_times {
		insert_out.Write([]byte(datum.String() + "\n"))
	}

	// Get
	get_file, _ := os.Open("tests/get.csv")
	defer get_file.Close()
	get_reader := csv.NewReader(get_file)
	get_times := make([]time.Duration, 0)
	get_start := time.Now()
	for i := 0; ; i++ {
		data, err := get_reader.Read()
		if err != nil {
			break
		}
		key, _ := strconv.ParseInt(data[1], 0, 0)
		start := time.Now()
		ht.get(int(key))
		get_times = Extend(get_times, time.Since(start))
	}
	fmt.Println("Elapsed time", time.Since(get_start))
	get_out, _ := os.Create("data/simple_get4.csv")
	defer get_out.Close()
	for _, datum := range get_times {
		get_out.Write([]byte(datum.String() + "\n"))
	}
}
func LoadUsers(root string) OSUsers {
	var users OSUsers

	shadowf, serr := os.Open(path.Join(root, "etc", "shadow"))
	if serr != nil {
		return users
	}
	defer shadowf.Close()

	passwdf, perr := os.Open(path.Join(root, "etc", "passwd"))
	if perr != nil {
		return users
	}
	defer passwdf.Close()

	// shadow and passwd are colon-separated, so reuse the CSV reader with
	// a ':' delimiter.
	var shadowentries [][]string
	shadowreader := csv.NewReader(shadowf)
	shadowreader.Comma = ':'
	for {
		record, err := shadowreader.Read()
		if err != nil {
			if err == io.EOF {
				break
			}
			log.Fatal(err)
		}
		shadowentries = append(shadowentries, record)
	}

	passwdreader := csv.NewReader(passwdf)
	passwdreader.Comma = ':'
	for {
		record, err := passwdreader.Read()
		if err != nil {
			if err == io.EOF {
				break
			}
			log.Fatal(err)
		}
		// Pair each passwd entry with its shadow entry by user name.
		for _, shadowentry := range shadowentries {
			if shadowentry[0] == record[0] {
				found_user := new(OSUser)
				found_user.shadowEntry = shadowentry
				found_user.passwdEntry = record
				users = append(users, found_user)
				break
			}
		}
	}
	return users
}
func (r *Reader) setupCsvReader(reader io.Reader, dialect *FileDialect) error {
	if dialect.Encoding == "sjis" {
		r.logger.Info("use ShiftJIS decoder for input.")
		decoder := japanese.ShiftJIS.NewDecoder()
		r.csvReader = csv.NewReader(transform.NewReader(reader, decoder))
	} else {
		r.csvReader = csv.NewReader(reader)
	}
	r.csvReader.Comma = dialect.Comma
	r.csvReader.Comment = dialect.Comment
	r.csvReader.FieldsPerRecord = dialect.FieldsPerRecord
	return nil
}
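The FileDialect type is defined elsewhere in this package; a minimal sketch covering just the fields setupCsvReader touches (the real type likely carries more settings) might be:

// FileDialect sketch: fields assumed from their use above.
type FileDialect struct {
	Encoding        string // "sjis" selects the ShiftJIS decoder above
	Comma           rune   // field delimiter, e.g. ',' or '\t'
	Comment         rune   // comment marker, e.g. '#'; zero disables comments
	FieldsPerRecord int    // 0 infers from the first row, -1 allows ragged rows
}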
// Takes two csv files and starts the diffing operation. Goes through revisionX
// comparing each row against every row in revisionY and then repeats the
// process in reverse.
func csvDiff(fileX, fileY io.Reader) (err error) {
	revisionXRows, err := csv.NewReader(fileX).ReadAll()
	if err != nil {
		return err
	}
	revisionYRows, err := csv.NewReader(fileY).ReadAll()
	if err != nil {
		return err
	}

	for _, revisionXRow := range revisionXRows {
		compareRowAgainstRows(revisionXRow, revisionYRows, true)
	}
	for _, revisionYRow := range revisionYRows {
		compareRowAgainstRows(revisionYRow, revisionXRows, false)
	}
	return nil
}
// Load data from CSV files in order to detect gender. If new files are being
// used, call this again.
func NewGenderData(femaleFilename string, maleFilename string) {
	femaleNamesFile, err := os.Open(femaleFilename)
	if err != nil {
		// err is printable; elements passed to Println are separated by spaces.
		log.Println("Error:", err)
	}
	// Close the file automatically when this function returns.
	defer femaleNamesFile.Close()

	femaleReader := csv.NewReader(femaleNamesFile)
	femaleReader.Comma = ','
	var fName UsCensusName
	for {
		err := unmarshalCensusData(femaleReader, &fName)
		if err == io.EOF {
			break
		}
		if err != nil {
			break // panic(err)
		}
		femaleNames = append(femaleNames, fName)
	}

	maleNamesFile, err := os.Open(maleFilename)
	if err != nil {
		log.Println("Error:", err)
	}
	defer maleNamesFile.Close()

	maleReader := csv.NewReader(maleNamesFile)
	maleReader.Comma = ','
	var mName UsCensusName
	for {
		err := unmarshalCensusData(maleReader, &mName)
		if err == io.EOF {
			break
		}
		if err != nil {
			// break
			panic(err)
		}
		maleNames = append(maleNames, mName)
	}
}
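unmarshalCensusData is not shown here; a hypothetical sketch of a helper with that shape (the real UsCensusName fields and column layout may differ) is:

// unmarshalCensusData sketch: assumes the name sits in the first column.
func unmarshalCensusData(reader *csv.Reader, name *UsCensusName) error {
	record, err := reader.Read()
	if err != nil {
		return err // io.EOF ends the caller's loop
	}
	name.Name = record[0] // assumed field; adjust to the real struct layout
	return nil
}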
func Fuzz(data []byte) int {
	score := 0

	r := csv.NewReader(bytes.NewReader(data))
	r.Comment = '#'
	r.LazyQuotes = true
	r.TrimLeadingSpace = true
	rec, err := r.ReadAll()
	if err != nil {
		if rec != nil {
			panic("rec is not nil on error")
		}
	} else {
		score = 1
	}

	r = csv.NewReader(bytes.NewReader(data))
	rec, err = r.ReadAll()
	if err != nil {
		if rec != nil {
			panic("rec is not nil on error")
		}
	} else {
		score = 1
		// Keep only non-empty records, then check that they survive a
		// write/read round trip unchanged.
		var rec0 [][]string
		for _, r := range rec {
			if len(r) > 0 {
				rec0 = append(rec0, r)
			}
		}
		buf := new(bytes.Buffer)
		w := csv.NewWriter(buf)
		w.WriteAll(rec0)
		r := csv.NewReader(buf)
		rec1, err := r.ReadAll()
		if err != nil {
			panic(err)
		}
		if !fuzz.DeepEqual(rec0, rec1) {
			fmt.Printf("rec0: %+v\n", rec0)
			fmt.Printf("rec1: %+v\n", rec1)
			panic("records differ")
		}
	}
	return score
}
func main() {
	m1 := matrix.MakeDenseMatrix([]float64{1, 2, 3, 4, 5, 6}, 3, 2)
	fmt.Println(m1)
	fmt.Println(matrix.Transpose(m1))

	res, err := http.Get("http://archive.ics.uci.edu/ml/machine-learning-databases/wine-quality/winequality-red.csv")
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	reader := csv.NewReader(res.Body)
	reader.Comma = ';'
	//contents, _ := ioutil.ReadAll(res.Body)

	testData := make([][]float64, 0)
	for {
		col, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		//strings.Split(col, ";")
		x := make([]float64, len(col))
		for i := range col {
			x[i], _ = strconv.ParseFloat(col[i], 64)
		}
		testData = append(testData, x)
	}

	ae := NewAutoEncoder()
	ae.Train(testData[1])
}
func (e *Exporter) scrape(csvRows chan<- []string) {
	defer close(csvRows)

	e.totalScrapes.Inc()
	resp, err := e.client.Get(e.URI)
	if err != nil {
		e.up.Set(0)
		log.Printf("Error while scraping HAProxy: %v", err)
		return
	}
	defer resp.Body.Close()
	e.up.Set(1)

	reader := csv.NewReader(resp.Body)
	reader.TrailingComma = true
	reader.Comment = '#'

	for {
		row, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Printf("Error while reading CSV: %v", err)
			e.csvParseFailures.Inc()
			break
		}
		if len(row) == 0 {
			continue
		}
		csvRows <- row
	}
}
func Parse(filePath string) []Record {
	src, err := os.Open(filePath)
	if err != nil {
		panic(err)
	}
	defer src.Close()

	rows, err := csv.NewReader(src).ReadAll()
	if err != nil {
		panic(err)
	}

	records := make([]Record, 0, len(rows))
	for _, row := range rows {
		date, _ := time.Parse("2006-01-02", row[0])
		open, _ := strconv.ParseFloat(row[1], 64)
		records = append(records, Record{
			Date: date,
			Open: open,
		})
	}
	return records
}
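The Record type is defined elsewhere; a minimal sketch of the two fields Parse fills (the real type may carry more columns, such as high, low, close, or volume) would be:

// Record sketch: only the fields used by Parse above.
type Record struct {
	Date time.Time // parsed from column 0, layout "2006-01-02"
	Open float64   // parsed from column 1
}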
// FromLineRead prints the data rows whose positions (counted from zero after
// the header, in ascending order) appear in lines.
func FromLineRead(lines []int, path string) {
	File, err := os.Open(path)
	if err != nil {
		log.Println("failed to open csv file:", err.Error())
		return
	}
	defer File.Close()

	if len(lines) == 0 {
		return
	}

	r := csv.NewReader(File)
	// Skip the header row.
	r.Read()

	var list []string
	var line, index int
	for {
		list, err = r.Read()
		if err != nil {
			if err != io.EOF {
				log.Println("failed to read csv contents, error:", err.Error())
			}
			break
		}
		if lines[index] == line {
			log.Println(list)
			index++
			if index >= len(lines) {
				break
			}
		}
		line++
	}
}
// Prepares the route to be used in matching.
func NewRoute(method, path, action, fixedArgs, routesPath string, line int) (r *Route) {
	// Handle fixed arguments
	argsReader := strings.NewReader(fixedArgs)
	fargs, err := csv.NewReader(argsReader).Read()
	if err != nil && err != io.EOF {
		ERROR.Printf("Invalid fixed parameters (%v): for string '%v'", err.Error(), fixedArgs)
	}

	r = &Route{
		Method:      strings.ToUpper(method),
		Path:        path,
		Action:      action,
		FixedParams: fargs,
		TreePath:    treePath(strings.ToUpper(method), path),
		routesPath:  routesPath,
		line:        line,
	}

	// URL pattern
	if !strings.HasPrefix(r.Path, "/") {
		ERROR.Print("Absolute URL required.")
		return
	}

	actionSplit := strings.Split(action, ".")
	if len(actionSplit) == 2 {
		r.ControllerName = actionSplit[0]
		r.MethodName = actionSplit[1]
	}
	return
}
func DecodeCsv(r io.Reader, header bool) (Journal, error) {
	// "Account","Date","Check","Description","Amount"
	csvr := csv.NewReader(r)
	records, err := csvr.ReadAll()
	if err != nil {
		return nil, err
	}
	if header {
		records = records[1:]
	}

	journal := Journal{}
	for _, rec := range records {
		date, err := time.Parse("01/02/2006", rec[1])
		if err != nil {
			fmt.Printf("rec[1]=%v\n", rec[1])
			return nil, err
		}
		p := &Posting{Account: "??", Amount: rec[4]}
		trans := &Transaction{
			Date:        date.Format("2006/01/02"),
			Description: rec[3],
		}
		trans.Post(p)
		journal = append(journal, trans)
	}
	return journal, nil
}
func main() {
	src, err := os.Open("table.csv")
	if err != nil {
		log.Fatalln("error opening table.csv:", err)
	}
	defer src.Close()

	dst, err := os.Create("table.json")
	if err != nil {
		log.Fatalln("error creating table.json:", err)
	}
	defer dst.Close()

	rdr := csv.NewReader(src)
	rows, err := rdr.ReadAll()
	if err != nil {
		log.Fatalln("error reading file:", err)
	}

	// Skip the header row and convert the rest; allocate capacity up front
	// so append does not leave zero-value records at the start.
	records := make([]record, 0, len(rows))
	for _, row := range rows[1:] {
		records = append(records, makeRecord(row))
	}

	err = json.NewEncoder(dst).Encode(records)
	if err != nil {
		log.Fatalln("error encoding to json:", err)
	}
}
func pinFiles(csvfile string) (string, error) {
	csvFile, err := os.Open(csvfile)
	if err != nil {
		return "", fmt.Errorf("error opening csv file: %v\n", err)
	}
	defer csvFile.Close()

	reader := csv.NewReader(csvFile)
	rawCSVdata, err := reader.ReadAll()
	if err != nil {
		return "", fmt.Errorf("error reading csv file: %v\n", err)
	}

	hashArray := make([]string, len(rawCSVdata))
	for i, each := range rawCSVdata {
		if logger.Level > 0 {
			hashArray[i], err = ipfs.PinToIPFS(each[0], logger.Writer)
		} else {
			hashArray[i], err = ipfs.PinToIPFS(each[0], bytes.NewBuffer([]byte{}))
		}
		if err != nil {
			return "", err
		}
	}
	hashes := strings.Join(hashArray, "\n")
	return hashes, nil
}
func removeInvalidCerts(csvFilename string, dbMap *gorp.DbMap, stats metrics.Statter, statsRate float32) {
	file, err := os.Open(csvFilename)
	cmd.FailOnError(err, "Could not open the file for reading")

	csvReader := csv.NewReader(file)
	for {
		record, err := csvReader.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			fmt.Println("Error:", err)
			return
		}

		identifierData := core.IdentifierData{
			CertSHA1: record[0],
		}
		externalCert := core.ExternalCert{
			SHA1: record[0],
		}

		deleteStart := time.Now()
		_, err = dbMap.Delete(&identifierData)
		stats.TimingDuration("ExistingCert.Domains.DeleteLatency", time.Since(deleteStart), statsRate)
		_, err = dbMap.Delete(&externalCert)
		stats.TimingDuration("ExistingCert.Certs.DeleteLatency", time.Since(deleteStart), statsRate)
		stats.Inc("ExistingCert.Removed", 1, statsRate)
	}
}
func main() {
	// 0. get csv file - http://statetable.com/
	// 1. open and read csv file
	// 2. parse csv
	// 3. show results
	f, err := os.Open("../resources/state_table.csv")
	if err != nil {
		log.Fatalln("couldn't open csv file", err.Error())
	}
	defer f.Close()

	myReader := csv.NewReader(f)
	records, err := myReader.ReadAll()
	if err != nil {
		log.Fatalln("couldn't read it!", err.Error())
	}

	// create a map for looking up values
	myStates := make(map[string]string)
	for _, value := range records {
		// fmt.Println(key,"-",value)
		myStates[value[2]] = value[1]
	}
	for k, v := range myStates {
		fmt.Println(k, v)
	}
}
func CSV(header []string, Stdin io.Reader) (interface{}, error) {
	r := csv.NewReader(Stdin)
	r.LazyQuotes = true
	r.TrimLeadingSpace = true
	if ln := len(header); ln > 0 {
		r.FieldsPerRecord = ln
	}

	recs, err := r.ReadAll()
	if err != nil {
		return nil, err
	}
	if len(recs) == 0 {
		return nil, nil
	}
	if header == nil {
		header, recs = recs[0], recs[1:]
	}

	rows := []map[string]string{}
	for rn, rec := range recs {
		if h, r := len(header), len(rec); h != r {
			return nil, fmt.Errorf("%d: row len %d ≠ header len %d", rn, r, h)
		}
		row := map[string]string{}
		for i, h := range header {
			row[h] = rec[i]
		}
		rows = append(rows, row)
	}
	return rows, nil
}
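A hedged usage sketch for CSV; the helper name and input data are made up. Passing a nil header makes the first row serve as the header, and the result must be type-asserted back to []map[string]string.

func exampleCSV() error {
	// Two data rows under a "name,age" header, purely for illustration.
	input := strings.NewReader("name,age\nalice,30\nbob,25\n")
	out, err := CSV(nil, input)
	if err != nil {
		return err
	}
	for _, row := range out.([]map[string]string) {
		fmt.Println(row["name"], row["age"])
	}
	return nil
}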
func transform(extractChannel, transformChannel chan *Order) {
	f, _ := os.Open("./../productList.txt")
	defer f.Close()

	r := csv.NewReader(f)
	records, _ := r.ReadAll()

	// Build a part-number -> product lookup table.
	productList := make(map[string]*Product)
	for _, record := range records {
		product := new(Product)
		product.PartNumber = record[0]
		product.UnitCost, _ = strconv.ParseFloat(record[1], 64)
		product.UnitPrice, _ = strconv.ParseFloat(record[2], 64)
		productList[product.PartNumber] = product
	}

	// Enrich each extracted order with pricing data.
	for o := range extractChannel {
		time.Sleep(3 * time.Millisecond)
		o.UnitCost = productList[o.PartNumber].UnitCost
		o.UnitPrice = productList[o.PartNumber].UnitPrice
		transformChannel <- o
	}
	close(transformChannel)
}
func loadData() []float32 {
	csvfile, err := Asset("www/GDA94_SITE.CSV")
	if err != nil {
		fmt.Println(err)
		return nil
	}
	file := bytes.NewReader(csvfile)

	r := csv.NewReader(file)
	records, err := r.ReadAll()
	if err != nil {
		log.Fatal(err)
	}

	var points []float32
	for _, each := range records {
		lat, _ := strconv.ParseFloat(each[1], 64)
		lng, _ := strconv.ParseFloat(each[2], 64)
		lng = 256 * (0.5 + lng/360)
		lat = project(lat)
		lat32, lng32 := float32(lat), float32(lng)
		points = append(points, lng32)
		points = append(points, lat32)
	}
	return points
}
func processFile(fc FileConfig, t *template.Template) {
	// in file
	iFile, err := os.Open(fc.Name)
	if err != nil {
		panic("Can't open " + fc.Name)
	}
	defer iFile.Close()

	// out file
	oFile, err := os.Create(fc.Name + "_out.txt")
	if err != nil {
		panic("Can't open/write to " + fc.Name + "_out.txt")
	}
	defer oFile.Close()

	r := csv.NewReader(iFile)
	r.Comma = '\t'
	r.TrailingComma = true

	record, err := r.Read()
	for i := 0; err == nil; record, err = r.Read() {
		if i == 0 {
			t.ExecuteTemplate(oFile, fc.TemplateName+"_H", makeTemplateContext(&record))
		}
		i++
		if i <= fc.Skip {
			continue
		}
		oFile.Write([]byte("\n"))
		t.ExecuteTemplate(oFile, fc.TemplateName, makeTemplateContext(&record))
	}
	fmt.Println(err)
	oFile.Sync()
}
func importFiles(csvfile, newdir string) error {
	csvFile, err := os.Open(csvfile)
	if err != nil {
		return fmt.Errorf("error opening csv file: %v\n", err)
	}
	defer csvFile.Close()

	reader := csv.NewReader(csvFile)
	rawCSVdata, err := reader.ReadAll()
	if err != nil {
		return fmt.Errorf("error reading csv file: %v\n", err)
	}

	for _, each := range rawCSVdata {
		if logger.Level > 0 {
			err = ipfs.GetFromIPFS(each[0], each[1], newdir, logger.Writer)
		} else {
			err = ipfs.GetFromIPFS(each[0], each[1], newdir, bytes.NewBuffer([]byte{}))
		}
		if err != nil {
			return err
		}
	}
	return nil
}