//Read read csv for handle func ReadLines(file string, isGbk bool) (lines [][]string, err error) { //catch panic defer func() { if rerr := recover(); rerr != nil { err = errors.New(fmt.Sprintf("read csv file: %v, error: %v", file, rerr)) } }() //open file fi, err := os.Open(file) if err != nil { return nil, err } defer fi.Close() //get reader var reader *csv.Reader if !isGbk { reader = csv.NewReader(fi) } else { //transform gbk to utf8 r := transform.NewReader(fi, simplifiedchinese.GBK.NewDecoder()) reader = csv.NewReader(r) } lines, err = reader.ReadAll() return }
// Get is to get OTC csv data. func (o *OTCLists) Get(category string) ([][]string, error) { var ( csvArrayContent []string csvReader *csv.Reader data []byte err error rawData [][]string url string ) url = fmt.Sprintf("%s%s", utils.OTCHOST, fmt.Sprintf(utils.OTCLISTCSV, fmt.Sprintf("%d/%02d/%02d", o.Date.Year()-1911, o.Date.Month(), o.Date.Day()), category)) if data, err = hCache.Get(url, false); err == nil { csvArrayContent = strings.Split(string(data), "\n") if len(csvArrayContent) > 5 { csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:len(csvArrayContent)-1], "\n"))) if rawData, err = csvReader.ReadAll(); err == nil { o.categoryRawData[category] = rawData o.formatData(category) return rawData, nil } } } return nil, err }
// Get return csv data in array.
// Results are memoized per date in d.UnixMapData; a cached entry is
// returned without refetching. Header layout differs per exchange:
// "tse" data starts at line 2, "otc" at line 5 (with one trailing line
// dropped). d.Name is lazily filled from the header the first time.
func (d *Data) Get() ([][]string, error) {
	if len(d.UnixMapData[d.Date.Unix()]) == 0 {
		data, err := hCache.Get(d.URL(), true)
		if err != nil {
			return nil, fmt.Errorf(errorNetworkFail.Error(), err)
		}
		csvArrayContent := strings.Split(string(data), "\n")
		// Normalize line endings / stray whitespace before slicing.
		for i := range csvArrayContent {
			csvArrayContent[i] = strings.TrimSpace(csvArrayContent[i])
		}
		var csvReader *csv.Reader
		// Each exchange needs a minimum number of lines for its header
		// plus at least one data row.
		if (d.exchange == "tse" && len(csvArrayContent) > 2) || (d.exchange == "otc" && len(csvArrayContent) > 5) {
			if d.exchange == "tse" {
				if d.Name == "" {
					// Stock name is the 3rd space-separated token of line 0.
					d.Name = strings.Split(csvArrayContent[0], " ")[2]
				}
				csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[2:], "\n")))
			} else if d.exchange == "otc" {
				if d.Name == "" {
					// Stock name follows the colon on line 2.
					d.Name = strings.Split(csvArrayContent[2], ":")[1]
				}
				csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[5:len(csvArrayContent)-1], "\n")))
			}
			allData, err := csvReader.ReadAll()
			// Prepend the fresh rows to the accumulated raw data and
			// cache them under this date.
			d.RawData = append(allData, d.RawData...)
			d.UnixMapData[d.Date.Unix()] = allData
			d.clearCache()
			return allData, err
		}
		return nil, errorNotEnoughData
	}
	return d.UnixMapData[d.Date.Unix()], nil
}
func (mfr *MicrophoneFileReader) LoadMicrophones(reader *csv.Reader) (bool, error) { records, err := reader.ReadAll() if err != nil { fmt.Println(err) return false, err } for i := 0; i < len(records); i++ { price, err := strconv.ParseFloat(records[i][3], 64) if err != nil { return false, errors.New("Not able to parse price to float") } mic := Microphone{ name: records[i][0], brand: records[i][1], description: records[i][2], price: price, url: records[i][4], micType: records[i][5], micStyle: records[i][6], } mfr.microphoneList = append(mfr.microphoneList, mic) } return true, nil }
// Get is to get TWSE csv data.
// It POSTs the category and ROC-calendar date to the TWSE list
// endpoint, then slices the returned CSV differently per category
// before parsing. Parsed data is cached and formatted for every
// category except "MS".
func (l *Lists) Get(category string) ([][]string, error) {
	if TWSECLASS[category] == "" {
		return nil, errorNotSupport
	}
	year, month, day := l.Date.Date()
	// TWSE expects the ROC year (AD year - 1911).
	data, err := hCache.PostForm(fmt.Sprintf("%s%s", utils.TWSEHOST, utils.TWSELISTCSV),
		url.Values{"download": {"csv"}, "selectType": {category}, "qdate": {fmt.Sprintf("%d/%02d/%02d", year-1911, month, day)}})
	if err != nil {
		return nil, fmt.Errorf(errorNetworkFail.Error(), err)
	}
	csvArrayContent := strings.Split(string(data), "\n")
	var csvReader *csv.Reader
	switch category {
	case "MS":
		// Market summary: fixed window of lines 4..50.
		// NOTE(review): assumes the summary table always spans these
		// lines — confirm against a live payload.
		if len(csvArrayContent) > 6 {
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:51], "\n")))
		}
	case "ALLBUT0999", "ALL":
		if len(csvArrayContent) > 155 {
			// Keep only quoted stock-code rows (optionally prefixed
			// with '=' by Excel-style CSV exports).
			re := regexp.MustCompile("^=?[\"]{1}[0-9A-Z]{4,}")
			var pickdata []string
			for _, v := range csvArrayContent {
				if re.MatchString(v) {
					if v[0] == 61 { // 61 == '=': strip the Excel prefix
						pickdata = append(pickdata, v[1:])
					} else {
						pickdata = append(pickdata, v)
					}
				}
			}
			csvReader = csv.NewReader(strings.NewReader(strings.Join(pickdata, "\n")))
		}
	default:
		// Generic categories: drop a 4-line header and 7 trailing lines.
		if len(csvArrayContent) > 9 {
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:len(csvArrayContent)-7], "\n")))
		}
	}
	if csvReader != nil {
		returnData, err := csvReader.ReadAll()
		switch category {
		default:
			if err == nil {
				l.categoryRawData[category] = returnData
				l.formatData(category)
			}
		case "MS":
			// "MS" results are returned raw and intentionally not cached.
		}
		return returnData, err
	}
	return nil, errorNotEnoughData
}
func readAllRecord(reader *csv.Reader) { recs, err := reader.ReadAll() if err != nil { log.Fatal(err) } for _, row := range recs { printRow(row) } }
// ReadAll reads all the lines from a csv file and creates a list of objects from that file. func readAll(file string, c csvReader) ([]csvReader, error) { var r *csv.Reader var err error if len(c.yearS()) == 0 { // no limits on db load r, err = newReader(file) if err != nil { return nil, err } r.Read() //dispose of the first line if not grepping file } else { // limited db load /*years := []int{} for i := c.start(); i < c.end()+1; i++ { years = append(years, i) }*/ r, err = grepYear(file, c.yearS()) if err != nil { return nil, err } } lines, err := r.ReadAll() if err != nil { return nil, err } results := make([]csvReader, len(lines)) for i, l := range lines { b, err := c.csvRead(l) if err != nil { return nil, err } results[i] = b } return results, err }
// executeDiff compares the file listings contained in two or more CSV
// files (one per command-line arg) and writes a tab-separated diff
// report. For every entry present in one listing it reports, per other
// listing: differing mtime, size, mode, or absence of the path.
// Comparison runs in one goroutine per listing; results are funneled
// through channel q into csvMap, then sorted and written out.
//
// NOTE(review): sjisIn, sjisOut, matches, ignores, fileOnly, dirOnly,
// silent, out, sorts and cnt appear to be package-level flag variables —
// confirm where they are declared; cnt in particular is mutated here.
func executeDiff(cmd *cobra.Command, args []string) {
	var (
		err     error
		match   *regexp.Regexp
		ignore  *regexp.Regexp
		csvMap  = make(map[string][]string)
		fisList = make([]FileInfos, 0)
		q       = make(chan info)
		wg      = new(sync.WaitGroup)
	)
	if len(args) == 0 {
		cmd.Help()
		return
	}
	// Get glob file args.
	args, err = core.GetGlobArgs(args)
	if err != nil {
		log.Fatalln(err)
	}
	// Recheck args: a diff needs at least two inputs after globbing.
	if len(args) <= 1 {
		cmd.Help()
		return
	}
	// Load csv and store.
	for _, csvPath := range args {
		fmt.Println("Open:", csvPath)
		c, err := os.Open(csvPath)
		if err != nil {
			log.Fatalln(err)
		}
		// NOTE(review): defer inside this loop holds every input file
		// open until the function returns.
		defer c.Close()
		var reader *csv.Reader
		if sjisIn {
			// Input is Shift-JIS encoded; decode to UTF-8 while reading.
			reader = csv.NewReader(transform.NewReader(c, japanese.ShiftJIS.NewDecoder()))
		} else {
			reader = csv.NewReader(c)
		}
		reader.Comma = '\t'
		// Skip header.
		_, err = reader.Read()
		if err != nil {
			log.Fatalln(err)
		}
		left, err := reader.ReadAll()
		if err != nil {
			log.Fatalln(err)
		}
		// Change data to FileInfos struct.
		fis := make(FileInfos, 0)
		for _, r := range left {
			fis = append(fis, *csvToFileInfo(r))
		}
		fisList = append(fisList, fis)
	}
	// Compile if given matches and ignores.
	if len(matches) != 0 {
		match, err = core.CompileStrs(matches)
		if err != nil {
			log.Fatalln(err)
		}
	}
	if len(ignores) != 0 {
		ignore, err = core.CompileStrs(ignores)
		if err != nil {
			log.Fatalln(err)
		}
	}
	// One goroutine per listing; i/one passed as arguments to avoid
	// loop-variable capture.
	for i, one := range fisList {
		wg.Add(1)
		go func(i int, one FileInfos) {
			defer wg.Done()
			// Diff fileinfo.
			for _, oneFi := range one {
				if fileOnly && oneFi.Type == DIR {
					continue
				}
				if dirOnly && oneFi.Type == FILE {
					continue
				}
				// Ignore check.
				if ignore != nil && ignore.MatchString(oneFi.Full) {
					continue
				}
				// Match check.
				if match != nil && !match.MatchString(oneFi.Full) {
					continue
				}
				for j, other := range fisList {
					if i == j {
						continue
					}
					// Get other's same full path info.
					otherFi, err := findFileInfo(other, oneFi)
					if err == nil {
						// Diff Time.
						if oneFi.Time != otherFi.Time {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileTime,
								value: oneFi.Time,
								ford:  oneFi.Type,
							}
						}
						// Diff Size.
						if oneFi.Size != otherFi.Size {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileSize,
								value: oneFi.Size,
								ford:  oneFi.Type,
							}
						}
						// Diff Mode.
						if oneFi.Mode != otherFi.Mode {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileMode,
								value: oneFi.Mode,
								ford:  oneFi.Type,
							}
						}
					} else {
						// Path missing from the other listing entirely.
						q <- info{
							path:  args[i],
							index: i,
							full:  oneFi.Full,
							diff:  FileFull,
							value: oneFi.Full,
							ford:  oneFi.Type,
						}
					}
				}
			}
		}(i, one)
	}
	// Async wait: close q once all workers finish so the range below ends.
	go func() {
		wg.Wait()
		close(q)
	}()
	// Receive diff and store to array.
	// Rows are keyed by path+diff-kind; column index+3 holds the value
	// for the index-th input file (columns 0-2 are path/type/diff-kind).
	for info := range q {
		cnt++
		if !silent {
			fmt.Fprintf(os.Stderr, "Count: %d\r", cnt)
		}
		key := info.full + fmt.Sprint(info.diff)
		if _, ok := csvMap[key]; ok {
			csvMap[key][info.index+3] = info.value
		} else {
			s := make([]string, len(args)+3)
			s[0] = info.full
			s[1] = info.ford
			s[2] = fmt.Sprint(info.diff)
			s[info.index+3] = info.value
			csvMap[key] = s
		}
	}
	if len(csvMap) == 0 {
		fmt.Println("There is no difference !")
		return
	}
	// Output to csv.
	os.MkdirAll(filepath.Dir(out), os.ModePerm)
	c, err := os.Create(out)
	if err != nil {
		log.Fatalln(err)
	}
	defer c.Close()
	var writer *csv.Writer
	if sjisOut {
		// Encode the report back to Shift-JIS.
		writer = csv.NewWriter(transform.NewWriter(c, japanese.ShiftJIS.NewEncoder()))
	} else {
		writer = csv.NewWriter(c)
	}
	writer.Comma = '\t'
	writer.UseCRLF = true
	// Write header.
	err = writer.Write(append(strings.Split(DiffHeader, "\t"), args...))
	if err != nil {
		log.Fatalln(err)
	}
	// map to array.
	var csvArray records
	for _, v := range csvMap {
		csvArray = append(csvArray, v)
	}
	// sort; default sort keys are columns 0 and 2 (path, diff kind).
	// NOTE(review): sorts is presumably read by records' Less — confirm.
	if sorts == "" {
		sorts = "0,2"
	}
	sort.Sort(csvArray)
	for _, v := range csvArray {
		err = writer.Write(v)
		if err != nil {
			log.Fatalln(err)
		}
	}
	writer.Flush()
	fmt.Printf("Write to [%s]. ([%d] row)\n", out, cnt)
}
func write(csvfile string, outfile string, isUtf8 bool) error { //open file fi, err := os.Open(csvfile) if err != nil { return err } defer fi.Close() //get reader var reader *csv.Reader if isUtf8 { reader = csv.NewReader(fi) } else { //transform gbk to utf8 r := transform.NewReader(fi, simplifiedchinese.GBK.NewDecoder()) reader = csv.NewReader(r) } lines, err := reader.ReadAll() if err != nil { return errors.New(fmt.Sprintf("read error: %v", err)) } lineNum := len(lines) if lineNum < 3 { return errors.New(fmt.Sprintf("Csv %v is invalid")) } names, fields, kinds := lines[0], lines[1], lines[2] fieldNum := len(names) filename := filename(csvfile) if outfile == "" { outfile = strings.Replace(csvfile, ".csv", ".go", -1) } //packname packname := "" outAbs, _ := filepath.Abs(outfile) packname = filepath.Base(filepath.Dir(outAbs)) if packname == "" { csvAbs, _ := filepath.Abs(csvfile) packname = filepath.Base(filepath.Dir(csvAbs)) } code := fmt.Sprintf("// Code generated by github.com/foolin/gocsv.\n// source: %v\n// DO NOT EDIT! \n\npackage %v\n\ntype %v struct {\n", filepath.Base(csvfile), packname, upper(filename)) for j := 0; j < fieldNum; j++ { name := names[j] field := fields[j] kind := kinds[j] if kind == "float" { kind = "float32" } code = code + fmt.Sprintf("\t%v %v `csv:\"%v\"` //%v\n", upper(field), kind, field, name) } code = code + "}\n" //mkdir err = os.MkdirAll(filepath.Dir(outAbs), 0755) if err != nil { return err } //write file err = ioutil.WriteFile(outfile, []byte(code), 0755) if err != nil { return err } log.Printf("write file: %v", outfile) return nil }
func ImportDictionaries() map[string][]*models.SuggestItem { var itemMap = make(map[string][]*models.SuggestItem) fileInfo, err := ioutil.ReadDir(DataDirectory) if err != nil { log.Print(err) return itemMap } numberOfDictionaries := 0 for _, file := range fileInfo { if !file.IsDir() && (strings.HasSuffix(file.Name(), ".txt") || strings.HasSuffix(file.Name(), ".txt.gz")) { dictionaryFile := fmt.Sprintf("%s%s%s", DataDirectory, string(os.PathSeparator), file.Name()) dictionaryName := strings.TrimSuffix(strings.TrimSuffix(file.Name(), ".gz"), ".txt") log.Printf("Importing dictionary %s from file %s", dictionaryName, dictionaryFile) csvFile, err := os.Open(dictionaryFile) if err != nil { log.Print(err) continue } defer csvFile.Close() var csvReader *csv.Reader if strings.HasSuffix(file.Name(), ".txt.gz") { gzipReader, gzerr := gzip.NewReader(csvFile) if gzerr == nil { defer gzipReader.Close() csvReader = csv.NewReader(gzipReader) } else { log.Print(gzerr) continue } } else { csvReader = csv.NewReader(csvFile) } csvReader.FieldsPerRecord = 2 csvReader.Comma = '|' csvReader.LazyQuotes = true csvReader.TrimLeadingSpace = true rawCSVdata, err := csvReader.ReadAll() if err != nil { log.Print(err) continue } for _, each := range rawCSVdata { var suggestItem = new(models.SuggestItem) suggestItem.Term = each[0] weight, err := strconv.Atoi(each[1]) if err == nil { suggestItem.Weight = weight itemMap[dictionaryName] = append(itemMap[dictionaryName], suggestItem) } } numberOfDictionaries++ } } log.Printf("Imported %d dictionaries", numberOfDictionaries) return itemMap }