Example #1
// Read takes a CSV reader and reads its contents into a typed List of structs. Each row is
// read into a struct named structName, described by headers. If the original data contained
// headers, it is expected that the input reader has already consumed them and is positioned
// at the first data row.
// If kinds is non-empty, it is used to type the fields in the generated structs; otherwise,
// the fields are left as string fields.
// In addition to the list, Read returns the typeRef for the structs in the list and, lastly,
// the typeDef of the structs.
func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, typeRef, typeDef types.Type) {
	typeRef, typeDef = MakeStructTypeFromHeaders(headers, structName, kinds)
	valueChan := make(chan types.Value, 128) // TODO: Make this a function param?
	listType := types.MakeCompoundType(types.ListKind, typeRef)
	listChan := types.NewStreamingTypedList(listType, vrw, valueChan)

	structFields := typeDef.Desc.(types.StructDesc).Fields

	for {
		row, err := r.Read()
		if err == io.EOF {
			close(valueChan)
			break
		} else if err != nil {
			panic(err)
		}

		fields := make(map[string]types.Value)
		for i, v := range row {
			if i < len(headers) {
				f := structFields[i]
				fields[f.Name] = StringToType(v, f.T.Kind())
			}
		}
		valueChan <- types.NewStruct(typeRef, typeDef, fields)
	}

	return <-listChan, typeRef, typeDef
}
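
A minimal usage sketch (not part of the source): it assumes the helper lives in the same package as Read, that vrw is an already-constructed types.ValueReadWriter, that an empty KindSlice is how a caller requests all-string fields, and that "Row" is just a placeholder struct name. The point it illustrates is that the header row must be consumed from the csv.Reader before Read is called.

// readAllAsStrings is a hypothetical helper illustrating Read's calling convention.
func readAllAsStrings(in io.Reader, vrw types.ValueReadWriter) (types.List, types.Type, types.Type) {
	r := csv.NewReader(in)
	// Consume the header row so the reader is positioned at the first data row,
	// as Read expects.
	headers, err := r.Read()
	if err != nil {
		panic(err)
	}
	// An empty KindSlice (assumption) leaves every generated field as a string field.
	return Read(r, "Row", headers, KindSlice{}, vrw)
}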
Example #2
// FIXME: run with pipe
func (s *testSuite) TestCSVExporter() {
	storeName := "store"
	setName := "csv"
	header := []string{"a", "b", "c"}
	payload := [][]string{
		{"5", "7", "100"},
		{"4", "10", "255"},
		{"512", "12", "55"},
	}
	structName := "SomeStruct"

	// Setup data store
	cs := chunks.NewLevelDBStore(s.LdbDir, storeName, 1, false)
	ds := dataset.NewDataset(datas.NewDataStore(cs), setName)

	// Build Struct fields based on header
	f := make([]types.Field, 0, len(header))
	for _, key := range header {
		f = append(f, types.Field{
			Name: key,
			T:    types.MakePrimitiveType(types.StringKind),
		})
	}

	typeDef := types.MakeStructType(structName, f, types.Choices{})
	pkg := types.NewPackage([]types.Type{typeDef}, []ref.Ref{})
	pkgRef := types.RegisterPackage(&pkg)
	typeRef := types.MakeType(pkgRef, 0)
	structFields := typeDef.Desc.(types.StructDesc).Fields

	// Build data rows
	structs := make([]types.Value, len(payload))
	for i, row := range payload {
		fields := make(map[string]types.Value)
		for j, v := range row {
			fields[structFields[j].Name] = types.NewString(v)
		}
		structs[i] = types.NewStruct(typeRef, typeDef, fields)
	}

	listType := types.MakeCompoundType(types.ListKind, typeRef)
	ds.Commit(types.NewTypedList(listType, structs...))
	ds.Store().Close()

	// Run exporter
	out := s.Run(main, []string{"-store", storeName, "-ds", setName})

	// Verify output
	csvReader := csv.NewReader(strings.NewReader(out))

	row, err := csvReader.Read()
	d.Chk.NoError(err)
	s.Equal(header, row)

	for i := 0; i < len(payload); i++ {
		row, err := csvReader.Read()
		d.Chk.NoError(err)
		s.Equal(payload[i], row)
	}

	_, err = csvReader.Read()
	s.Equal(io.EOF, err)
}