func (mfr *MicrophoneFileReader) LoadMicrophones(reader *csv.Reader) (bool, error) {
	records, err := reader.ReadAll()
	if err != nil {
		return false, err
	}
	for i := 0; i < len(records); i++ {
		price, err := strconv.ParseFloat(records[i][3], 64)
		if err != nil {
			return false, errors.New("not able to parse price to float")
		}
		mic := Microphone{
			name:        records[i][0],
			brand:       records[i][1],
			description: records[i][2],
			price:       price,
			url:         records[i][4],
			micType:     records[i][5],
			micStyle:    records[i][6],
		}
		mfr.microphoneList = append(mfr.microphoneList, mic)
	}
	return true, nil
}
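// A minimal usage sketch for LoadMicrophones, assuming a CSV file with the
// seven columns the loader expects (name, brand, description, price, url,
// type, style). The file name and zero-value construction are illustrative,
// not part of the original code.
func exampleLoadMicrophones() error {
	f, err := os.Open("microphones.csv")
	if err != nil {
		return err
	}
	defer f.Close()
	mfr := &MicrophoneFileReader{} // hypothetical zero-value construction
	_, err = mfr.LoadMicrophones(csv.NewReader(f))
	return err
}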
// Read takes a CSV reader and reads it into a typed List of structs. Each row
// gets read into a struct named structName, described by headers. If the
// original data contained headers, it is expected that the input reader has
// already read those and is pointing at the first data row.
// If kinds is non-empty, it will be used to type the fields in the generated
// structs; otherwise, they will be left as string fields.
// In addition to the list, Read returns the typeRef for the structs in the
// list, and last the typeDef of the structs.
func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, typeRef, typeDef types.Type) {
	typeRef, typeDef = MakeStructTypeFromHeaders(headers, structName, kinds)
	valueChan := make(chan types.Value, 128) // TODO: Make this a function param?
	listType := types.MakeCompoundType(types.ListKind, typeRef)
	listChan := types.NewStreamingTypedList(listType, vrw, valueChan)
	structFields := typeDef.Desc.(types.StructDesc).Fields
	for {
		row, err := r.Read()
		if err == io.EOF {
			close(valueChan)
			break
		} else if err != nil {
			panic(err)
		}
		fields := make(map[string]types.Value)
		for i, v := range row {
			if i < len(headers) {
				f := structFields[i]
				fields[f.Name] = StringToType(v, f.T.Kind())
			}
		}
		valueChan <- types.NewStruct(typeRef, typeDef, fields)
	}
	return <-listChan, typeRef, typeDef
}
// streamCsv streams a CSV reader into the returned channel. Each CSV row is
// sent along with the header. The channel is closed when the input is
// exhausted.
//
// Args:
//   csv    - the csv.Reader that will be read from.
//   buffer - the "lines" buffer factor; pass 0 for an unbuffered channel.
func streamCsv(csv *csv.Reader, buffer int) (lines chan *CsvLine) {
	lines = make(chan *CsvLine, buffer)
	go func() {
		// Read the header row first.
		header, err := csv.Read()
		if err != nil {
			close(lines)
			return
		}
		i := 0
		for {
			line, err := csv.Read()
			if len(line) > 0 {
				i++
				lines <- &CsvLine{Header: header, Line: line}
			}
			if err != nil {
				fmt.Printf("Sent %d lines\n", i)
				close(lines)
				return
			}
		}
	}()
	return
}
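// A minimal consumption sketch for streamCsv, assuming CsvLine exposes the
// Header and Line fields used above. The file name is illustrative.
func exampleStreamCsv() {
	f, err := os.Open("data.csv")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	for row := range streamCsv(csv.NewReader(f), 64) {
		fmt.Println(row.Header, row.Line) // ranging stops when the channel closes
	}
}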
// unmarshalCensusData reads one census CSV record from the reader into the
// struct pointed to by v, matching fields positionally.
func unmarshalCensusData(reader *csv.Reader, v interface{}) error {
	record, err := reader.Read()
	if err != nil {
		return err
	}
	s := reflect.ValueOf(v).Elem()
	if s.NumField() != len(record) {
		return &csvFieldMismatch{s.NumField(), len(record)}
	}
	for i := 0; i < s.NumField(); i++ {
		f := s.Field(i)
		switch f.Type().String() {
		case "string":
			f.SetString(record[i])
		case "int":
			ival, err := strconv.ParseInt(record[i], 10, 0)
			if err != nil {
				return err
			}
			f.SetInt(ival)
		case "float64":
			fval, err := strconv.ParseFloat(record[i], 64)
			if err != nil {
				return err
			}
			f.SetFloat(fval)
		default:
			return &csvUnsupportedType{f.Type().String()}
		}
	}
	return nil
}
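// A minimal sketch of driving unmarshalCensusData, assuming a hypothetical
// row type whose field order matches the CSV columns.
type censusRow struct {
	Name       string
	Population int
	Density    float64
}

func exampleUnmarshalCensus(reader *csv.Reader) (censusRow, error) {
	var row censusRow
	err := unmarshalCensusData(reader, &row) // pass a pointer so reflect can set fields
	return row, err
}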
// Get fetches OTC CSV data for the given category.
func (o *OTCLists) Get(category string) ([][]string, error) {
	var (
		csvArrayContent []string
		csvReader       *csv.Reader
		data            []byte
		err             error
		rawData         [][]string
		url             string
	)
	// The OTC endpoint expects the ROC calendar year (Gregorian year - 1911).
	url = fmt.Sprintf("%s%s", utils.OTCHOST, fmt.Sprintf(utils.OTCLISTCSV,
		fmt.Sprintf("%d/%02d/%02d", o.Date.Year()-1911, o.Date.Month(), o.Date.Day()), category))
	if data, err = hCache.Get(url, false); err == nil {
		csvArrayContent = strings.Split(string(data), "\n")
		if len(csvArrayContent) > 5 {
			// Skip the four header lines and the trailing line before parsing.
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:len(csvArrayContent)-1], "\n")))
			if rawData, err = csvReader.ReadAll(); err == nil {
				o.categoryRawData[category] = rawData
				o.formatData(category)
				return rawData, nil
			}
		}
	}
	return nil, err
}
// Get returns the CSV data as a 2D string slice, fetching and caching it on
// first access.
func (d *Data) Get() ([][]string, error) {
	if len(d.UnixMapData[d.Date.Unix()]) == 0 {
		data, err := hCache.Get(d.URL(), true)
		if err != nil {
			return nil, fmt.Errorf(errorNetworkFail.Error(), err)
		}
		csvArrayContent := strings.Split(string(data), "\n")
		for i := range csvArrayContent {
			csvArrayContent[i] = strings.TrimSpace(csvArrayContent[i])
		}
		var csvReader *csv.Reader
		if (d.exchange == "tse" && len(csvArrayContent) > 2) ||
			(d.exchange == "otc" && len(csvArrayContent) > 5) {
			if d.exchange == "tse" {
				if d.Name == "" {
					d.Name = strings.Split(csvArrayContent[0], " ")[2]
				}
				csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[2:], "\n")))
			} else if d.exchange == "otc" {
				if d.Name == "" {
					d.Name = strings.Split(csvArrayContent[2], ":")[1]
				}
				csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[5:len(csvArrayContent)-1], "\n")))
			}
			allData, err := csvReader.ReadAll()
			d.RawData = append(allData, d.RawData...)
			d.UnixMapData[d.Date.Unix()] = allData
			d.clearCache()
			return allData, err
		}
		return nil, errorNotEnoughData
	}
	return d.UnixMapData[d.Date.Unix()], nil
}
func RecsFromCSVReader(r *csv.Reader, recs map[string][]RepoRelation) error {
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return fmt.Errorf("failed to read csv record: %v", err)
		}
		if len(record[0]) < prefixLen {
			log.Printf("not a valid rec record %v", record)
			continue
		}
		sp1 := ShortPathFromURL(record[0])
		sp2 := ShortPathFromURL(record[1])
		c, err := strconv.ParseFloat(record[2], 64)
		if err != nil {
			log.Printf("Failed to ParseFloat(%q, 64): %v", record[2], err)
		}
		recs[sp1] = append(recs[sp1], RepoRelation{sp2, c})
	}
	for k := range recs {
		sort.Sort(ByScore(recs[k]))
	}
	log.Printf("%v recs have been loaded", len(recs))
	return nil
}
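// A minimal driver sketch for RecsFromCSVReader; the map must be allocated
// by the caller. The file name is illustrative.
func exampleLoadRecs() (map[string][]RepoRelation, error) {
	f, err := os.Open("recs.csv")
	if err != nil {
		return nil, err
	}
	defer f.Close()
	recs := make(map[string][]RepoRelation)
	if err := RecsFromCSVReader(csv.NewReader(f), recs); err != nil {
		return nil, err
	}
	return recs, nil
}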
func (t *table) start(reader *csv.Reader) {
	defer t.Stop()
	defer close(t.rows)
	headers, err := reader.Read()
	if err != nil {
		if perr, ok := err.(*csv.ParseError); ok {
			// Modifies the underlying err.
			perr.Err = fmt.Errorf("%s. %s", perr.Err,
				"This can happen when the CSV is malformed, or when the wrong delimiter is used")
		}
		t.handleErr(err)
		return
	}
	reader.FieldsPerRecord = len(headers)
	for {
		if t.stopped {
			break
		}
		line, err := reader.Read()
		if err != nil {
			t.handleErr(err)
			return
		}
		t.rows <- convertLineToRow(line, headers)
	}
}
// ReadToList takes a CSV reader and reads it into a typed List of structs.
// Each row gets read into a struct named structName, described by headers.
// If the original data contained headers, it is expected that the input
// reader has already read those and is pointing at the first data row.
// If kinds is non-empty, it will be used to type the fields in the generated
// structs; otherwise, they will be left as string fields.
// In addition to the list, ReadToList returns the type of the structs in the
// list.
func ReadToList(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, t *types.Type) {
	t, fieldOrder, kindMap := MakeStructTypeFromHeaders(headers, structName, kinds)
	valueChan := make(chan types.Value, 128) // TODO: Make this a function param?
	listChan := types.NewStreamingList(vrw, valueChan)
	for {
		row, err := r.Read()
		if err == io.EOF {
			close(valueChan)
			break
		} else if err != nil {
			panic(err)
		}
		fields := make(types.ValueSlice, len(headers))
		for i, v := range row {
			if i < len(headers) {
				fieldOrigIndex := fieldOrder[i]
				val, err := StringToValue(v, kindMap[fieldOrigIndex])
				if err != nil {
					d.Chk.Fail(fmt.Sprintf("Error parsing value for column '%s': %s", headers[i], err))
				}
				fields[fieldOrigIndex] = val
			}
		}
		valueChan <- types.NewStructWithType(t, fields)
	}
	return <-listChan, t
}
func read_record(csv_reader *csv.Reader) (loadedRecord, error) {
	row, err := csv_reader.Read()
	if err != nil {
		return loadedRecord{}, err
	}
	var id, key string
	if len(row) == 2 {
		id, key = row[0], row[1]
	} else {
		key = row[0]
	}
	// Strip digits from the key and lowercase it.
	numeric := regexp.MustCompile("[0-9]")
	new_key := numeric.ReplaceAllString(key, "")
	new_key = strings.ToLower(new_key)
	record := loadedRecord{
		id:           id,
		key:          new_key,
		original_key: key,
		length:       len(new_key) - 2,
	}
	record.trigrams = create_trigram(new_key, record.length)
	return record, nil
}
// LoadCases will load data stored case by case from a CSV reader into a
// feature matrix that has already been filled with the corresponding empty
// features. It is a lower level method generally called after initial setup
// to parse a fm, arff, csv etc.
func (fm *FeatureMatrix) LoadCases(data *csv.Reader, rowlabels bool) {
	count := 0
	for {
		record, err := data.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			log.Print("Error:", err)
			break
		}
		caselabel := fmt.Sprintf("%v", count)
		if rowlabels {
			// The first column holds the case label.
			caselabel = record[0]
			record = record[1:]
		}
		fm.CaseLabels = append(fm.CaseLabels, caselabel)
		for i, v := range record {
			fm.Data[i].Append(v)
		}
		count++
	}
}
// parseColumns derives column names from the first header row or from flags.
func parseColumns(reader *csv.Reader, skipHeader bool, fields string) ([]string, error) {
	var err error
	var columns []string
	if fields != "" {
		columns = strings.Split(fields, ",")
		if skipHeader {
			reader.Read() // force-consume one row
		}
	} else {
		columns, err = reader.Read()
		if err != nil {
			return nil, err
		}
	}
	for _, col := range columns {
		if containsDelimiter(col) {
			return columns, errors.New("Please specify the correct delimiter with -d.\nHeader column contains a delimiter character: " + col)
		}
	}
	for i, col := range columns {
		columns[i] = postgresify(col)
	}
	return columns, nil
}
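// A minimal sketch of the two parseColumns modes: an explicit field list
// versus header-row discovery. The reader and field names are illustrative.
func exampleParseColumns(r *csv.Reader) {
	// Explicit column list; skip the file's own header row.
	cols, err := parseColumns(r, true, "id,name,price")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(cols) // names are sanitized via postgresify
}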
func processFile(reader *csv.Reader, keyIndex, valueIndex int, headerRow bool) ([]MapEntry, error) {
	entries := []MapEntry{}
	for i := 0; true; i++ {
		row, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		if headerRow && i == 0 {
			continue
		}
		numFields := len(row)
		if keyIndex > numFields {
			return nil, fmt.Errorf("key index '%d' > number of fields '%d'", keyIndex, numFields)
		}
		if valueIndex > numFields {
			return nil, fmt.Errorf("value index '%d' > number of fields '%d'", valueIndex, numFields)
		}
		// Indexes are 1-based, hence the -1 offsets.
		key := row[keyIndex-1]
		value := row[valueIndex-1]
		entries = append(entries, MapEntry{key, value})
	}
	return entries, nil
}
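// A minimal driver sketch for processFile, using 1-based column indexes as
// the bounds checks above imply. The file name is illustrative.
func exampleProcessFile() ([]MapEntry, error) {
	f, err := os.Open("pairs.csv")
	if err != nil {
		return nil, err
	}
	defer f.Close()
	// Column 1 is the key, column 2 the value; the first row is a header.
	return processFile(csv.NewReader(f), 1, 2, true)
}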
// ReadLines reads a CSV file into a slice of rows, decoding from GBK to
// UTF-8 when isGbk is set.
func ReadLines(file string, isGbk bool) (lines [][]string, err error) {
	// Catch panics from the underlying readers.
	defer func() {
		if rerr := recover(); rerr != nil {
			err = fmt.Errorf("read csv file: %v, error: %v", file, rerr)
		}
	}()
	fi, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer fi.Close()
	var reader *csv.Reader
	if !isGbk {
		reader = csv.NewReader(fi)
	} else {
		// Transform GBK to UTF-8.
		r := transform.NewReader(fi, simplifiedchinese.GBK.NewDecoder())
		reader = csv.NewReader(r)
	}
	lines, err = reader.ReadAll()
	return
}
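// A minimal usage sketch for ReadLines; the file name is illustrative.
func exampleReadLines() {
	lines, err := ReadLines("legacy_gbk.csv", true) // decode from GBK
	if err != nil {
		log.Fatal(err)
	}
	for _, line := range lines {
		fmt.Println(line)
	}
}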
// NewUserFromCSV creates a new user read from a CSV reader.
// Fields are stored in the sequence in which they appear in the struct, with
// arrays represented as semicolon-separated lists. done is true once the
// reader is exhausted; a nil user with done == false marks a skipped row.
func NewUserFromCSV(reader *csv.Reader) (user *User, done bool) {
	line, err := reader.Read()
	if err != nil {
		return nil, true
	}
	if len(line) != 7 {
		return nil, false
	}
	// Skip comment lines.
	firstElement := strings.TrimSpace(line[0])
	if len(firstElement) > 0 && firstElement[0] == '#' {
		return nil, false
	}
	level := line[2]
	ValidFrom, _ := time.Parse("2006-01-02 15:04", line[4])
	ValidTo, _ := time.Parse("2006-01-02 15:04", line[5])
	if !isValidLevel(level) {
		log.Printf("Got invalid level '%s'", level)
		return nil, false
	}
	return &User{
		Name:        line[0],
		ContactInfo: line[1],
		UserLevel:   Level(level),
		Sponsors:    strings.Split(line[3], ";"),
		ValidFrom:   ValidFrom, // field 4
		ValidTo:     ValidTo,   // field 5
		Codes:       strings.Split(line[6], ";"),
	}, false
}
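// A minimal loop sketch for NewUserFromCSV: done distinguishes end-of-input
// from rows that were merely skipped (comments, bad levels, wrong arity).
func exampleReadUsers(reader *csv.Reader) []*User {
	var users []*User
	for {
		user, done := NewUserFromCSV(reader)
		if done {
			break
		}
		if user != nil {
			users = append(users, user)
		}
	}
	return users
}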
func (d *Data) readFeatures(in csv.Reader) {
	for {
		record, err := in.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			fmt.Println("ERROR: ", err)
		}
		x := len(record)
		conv := make([]float64, x)
		correctData := true
		for i := 0; i < x; i++ {
			conv[i], err = strconv.ParseFloat(record[i], 64)
			if err != nil {
				fmt.Println("ERROR: ", err)
				correctData = false
			}
		}
		// Only keep rows where every field parsed cleanly.
		if correctData {
			d.AppendRow(conv)
		}
	}
}
func (d *Data) readHeader(r csv.Reader) {
	record, err := r.Read()
	if err != nil {
		fmt.Println("ERROR: ", err)
	}
	d.setFeatures(record)
}
// Get fetches TWSE CSV data for the given category.
func (l *Lists) Get(category string) ([][]string, error) {
	if TWSECLASS[category] == "" {
		return nil, errorNotSupport
	}
	year, month, day := l.Date.Date()
	data, err := hCache.PostForm(fmt.Sprintf("%s%s", utils.TWSEHOST, utils.TWSELISTCSV),
		url.Values{
			"download":   {"csv"},
			"selectType": {category},
			// TWSE expects the ROC calendar year (Gregorian year - 1911).
			"qdate": {fmt.Sprintf("%d/%02d/%02d", year-1911, month, day)},
		})
	if err != nil {
		return nil, fmt.Errorf(errorNetworkFail.Error(), err)
	}
	csvArrayContent := strings.Split(string(data), "\n")
	var csvReader *csv.Reader
	switch category {
	case "MS":
		if len(csvArrayContent) > 6 {
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:51], "\n")))
		}
	case "ALLBUT0999", "ALL":
		if len(csvArrayContent) > 155 {
			re := regexp.MustCompile("^=?[\"]{1}[0-9A-Z]{4,}")
			var pickdata []string
			for _, v := range csvArrayContent {
				if re.MatchString(v) {
					if v[0] == '=' {
						// Drop the leading '=' Excel-formula marker.
						pickdata = append(pickdata, v[1:])
					} else {
						pickdata = append(pickdata, v)
					}
				}
			}
			csvReader = csv.NewReader(strings.NewReader(strings.Join(pickdata, "\n")))
		}
	default:
		if len(csvArrayContent) > 9 {
			csvReader = csv.NewReader(strings.NewReader(strings.Join(csvArrayContent[4:len(csvArrayContent)-7], "\n")))
		}
	}
	if csvReader != nil {
		returnData, err := csvReader.ReadAll()
		switch category {
		case "MS":
			// MS data is returned raw, without formatting.
		default:
			if err == nil {
				l.categoryRawData[category] = returnData
				l.formatData(category)
			}
		}
		return returnData, err
	}
	return nil, errorNotEnoughData
}
func readOneRecordAtaTime(reader *csv.Reader) {
	for {
		row, err := reader.Read()
		if err != nil {
			// Stops on io.EOF as well as on real errors.
			break
		}
		printRow(row)
	}
}
func readAllRecord(reader *csv.Reader) {
	recs, err := reader.ReadAll()
	if err != nil {
		log.Fatal(err)
	}
	for _, row := range recs {
		printRow(row)
	}
}
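// printRow is referenced by both readers above but not defined in this
// section; a minimal sketch might simply join the fields:
func printRow(row []string) {
	fmt.Println(strings.Join(row, ", "))
}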
func read(r *csv.Reader, f func(record []string)) {
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			log.Fatal("Error reading defects file ", err)
		}
		// Skip the header row, identified by its "Id" column.
		if record[1] != "Id" {
			f(record)
		}
	}
}
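// A minimal callback sketch for read: collect the second column of every
// data row. The column meaning is illustrative.
func exampleCollectIds(r *csv.Reader) []string {
	var ids []string
	read(r, func(record []string) {
		ids = append(ids, record[1])
	})
	return ids
}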
func copyCSVRows(i *Import, reader *csv.Reader, ignoreErrors bool, delimiter string, columns []string) (error, int, int) {
	success := 0
	failed := 0
	for {
		cols := make([]interface{}, len(columns))
		record, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			line := strings.Join(record, delimiter)
			failed++
			if ignoreErrors {
				os.Stderr.WriteString(line)
				continue
			}
			return fmt.Errorf("%s: %s", err, line), success, failed
		}
		// The loop ensures we don't insert too many values and that
		// values are properly converted into empty interfaces.
		for n, col := range record {
			cols[n] = col
		}
		err = i.AddRow(cols...)
		if err != nil {
			line := strings.Join(record, delimiter)
			failed++
			if ignoreErrors {
				os.Stderr.WriteString(line)
				continue
			}
			return fmt.Errorf("%s: %s", err, line), success, failed
		}
		success++
	}
	return nil, success, failed
}
func (con Sif2Cx) readSIF(reader *csv.Reader, w *bufio.Writer) {
	var netName string
	if con.Name == "" {
		netName = "CX from SIF file"
	} else {
		netName = con.Name
	}
	// Set delimiter and tolerate bare quotes.
	reader.Comma = con.Delimiter
	reader.LazyQuotes = true
	// Track nodes already serialized.
	nodesExists := make(map[string]int64)
	nodeCounter := int64(0)
	w.Write([]byte("["))
	for {
		record, err := reader.Read()
		if err == io.EOF {
			// Add network attributes at the end of the document.
			netAttr := cx.NetworkAttribute{N: "name", V: netName}
			attrList := []cx.NetworkAttribute{netAttr}
			netAttrs := make(map[string][]cx.NetworkAttribute)
			netAttrs["networkAttributes"] = attrList
			json.NewEncoder(w).Encode(netAttrs)
			w.Write([]byte("]"))
			w.Flush()
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		// Only source-interaction-target triples are converted.
		if len(record) == 3 {
			toJson(record, nodesExists, &nodeCounter, w)
		}
		w.Flush()
	}
}
func SMerge(r *csv.Reader, seek io.Seeker) []int {
	var m = make(map[string]int)
	var list []string
	var repeat []string
	// Skip the header row.
	r.Read()
	var key string
	var err error
	var line int
	for {
		list, err = r.Read()
		if err != nil {
			if err != io.EOF {
				log.Println("failed to read file contents, error:", err.Error())
			}
			break
		}
		if len(list) != 5 {
			log.Println("invalid data:", list)
			line++
			continue
		}
		key = strings.TrimSpace(list[1] + list[3])
		if key != "" {
			if _, ok := m[key]; ok {
				repeat = append(repeat, key)
				if !first {
					m[key] = line
				}
			} else {
				m[key] = line
			}
		}
		line++
	}
	var lines = make([]int, 0, len(m))
	for _, v := range repeat {
		if unrepeat {
			delete(m, v)
		} else {
			lines = append(lines, m[v])
		}
	}
	if unrepeat {
		for _, v := range m {
			lines = append(lines, v)
		}
	}
	sort.Ints(lines)
	return lines
}
// ReadToMap takes a CSV reader and reads it into a typed Map of structs,
// keyed by the column at pkIdx.
func ReadToMap(r *csv.Reader, headersRaw []string, pkIdx int, kinds KindSlice, vrw types.ValueReadWriter) types.Map {
	headers := make([]string, 0, len(headersRaw)-1)
	for i, h := range headersRaw {
		if i != pkIdx {
			headers = append(headers, h)
		}
	}
	var pkKind types.NomsKind
	if len(kinds) == 0 {
		pkKind = types.StringKind
	} else {
		pkKind = kinds[pkIdx]
		kinds = append(kinds[:pkIdx], kinds[pkIdx+1:]...)
	}
	t, fieldOrder, kindMap := MakeStructTypeFromHeaders(headers, "", kinds)
	kvChan := make(chan types.Value, 128)
	mapChan := types.NewStreamingMap(vrw, kvChan)
	for {
		row, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
		fieldIndex := 0
		var pk types.Value
		fields := make(types.ValueSlice, len(headers))
		for x, v := range row {
			if x == pkIdx {
				pk, err = StringToValue(v, pkKind)
			} else if fieldIndex < len(headers) {
				fieldOrigIndex := fieldOrder[fieldIndex]
				fields[fieldOrigIndex], err = StringToValue(v, kindMap[fieldOrigIndex])
				fieldIndex++
			}
			if err != nil {
				d.Chk.Fail(fmt.Sprintf("Error parsing value for column '%s': %s", headers[x], err))
			}
		}
		kvChan <- pk
		kvChan <- types.NewStructWithType(t, fields)
	}
	close(kvChan)
	return <-mapChan
}
// Signal reads one CSV row and parses every field as a float64.
func Signal(reader *csv.Reader) ([]float64, error) {
	fields, err := reader.Read()
	if err != nil {
		return nil, err
	}
	ndim := len(fields)
	point := make([]float64, ndim)
	for i := 0; i < ndim; i++ {
		point[i], err = strconv.ParseFloat(fields[i], 64)
		if err != nil {
			return nil, err
		}
	}
	return point, nil
}
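// A minimal loop sketch for Signal: read points until an error. Distinguishing
// io.EOF from parse errors is left to the caller.
func exampleReadSignals(reader *csv.Reader) [][]float64 {
	var points [][]float64
	for {
		point, err := Signal(reader)
		if err != nil {
			break // io.EOF or a parse error ends the stream
		}
		points = append(points, point)
	}
	return points
}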
func readHeaders(reader *csv.Reader, filter mapset.Set) (headers map[string]int) {
	line, err := reader.Read()
	util.Warn(err, "reading headers")
	headers = make(map[string]int, len(line))
	for i, k := range line {
		k = util.Slugged(k, "_")
		if filter == nil || filter.Contains(k) {
			headers[k] = i
		}
	}
	util.Debug("Headers %v", headers)
	return
}
func inferTypes(csv *csv.Reader, fields []string, numLines int) (map[string]interface{}, error) {
	template := make(map[string]interface{})
	learners := make([]*typeguessing.Learner, len(fields))
	for i := 0; i < len(learners); i++ {
		learners[i] = typeguessing.NewLearner()
	}
	// Feed up to numLines rows into the per-column learners.
	for i := 0; i < numLines; i++ {
		line, err := csv.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
		for j := range fields {
			learners[j].Feed(line[j])
		}
	}
	for i, f := range fields {
		exampleVal := learners[i].BestGuess()
		template[f] = exampleVal
	}
	return template, nil
}
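// A minimal driver sketch for inferTypes, sampling the first 100 rows. The
// field names are illustrative.
func exampleInferTypes(r *csv.Reader) {
	template, err := inferTypes(r, []string{"id", "name", "price"}, 100)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("inferred template: %v\n", template)
}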
func ReadToMap(r *csv.Reader, headers_raw []string, pkIdx int, kinds KindSlice, vrw types.ValueReadWriter) (m types.Map) {
	headers := make([]string, 0, len(headers_raw)-1)
	for i, h := range headers_raw {
		if i != pkIdx {
			headers = append(headers, types.EscapeStructField(h))
		}
	}
	var pkKind types.NomsKind
	if len(kinds) == 0 {
		pkKind = types.StringKind
	} else {
		pkKind = kinds[pkIdx]
		kinds = append(kinds[:pkIdx], kinds[pkIdx+1:]...)
	}
	t := MakeStructTypeFromHeaders(headers, "", kinds)
	kindMap := make(map[string]types.NomsKind, len(headers))
	t.Desc.(types.StructDesc).IterFields(func(name string, t *types.Type) {
		kindMap[name] = t.Kind()
	})
	m = types.NewMap()
	fields := map[string]types.Value{}
	var pk types.Value
	for {
		row, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
		fieldIndex := 0
		for x, v := range row {
			if x == pkIdx {
				pk = StringToType(v, pkKind)
			} else if fieldIndex < len(headers) {
				name := headers[fieldIndex]
				fields[name] = StringToType(v, kindMap[name])
				fieldIndex++
			}
		}
		m = m.Set(pk, types.NewStructWithType(t, fields))
	}
	return
}
func QMerge(r *csv.Reader) {
	var m = make(map[string][]string)
	var list []string
	var repeat []string
	// Skip the header row.
	r.Read()
	var err error
	var key string
	for {
		list, err = r.Read()
		if err != nil {
			if err != io.EOF {
				log.Println("failed to read file contents, error:", err.Error())
			}
			break
		}
		if len(list) != 5 {
			log.Println("invalid data:", list)
			continue
		}
		key = strings.TrimSpace(list[1] + list[3])
		if key != "" {
			if _, ok := m[key]; ok {
				repeat = append(repeat, key)
				if !first {
					m[key] = list
				}
			} else {
				m[key] = list
			}
		}
	}
	for _, value := range repeat {
		if unrepeat {
			delete(m, value)
		} else {
			log.Println(m[value])
		}
	}
	if unrepeat {
		for _, value := range m {
			log.Println(value)
		}
	}
}