Example 1
0
// start opens the named dataset and mounts a noms-backed filesystem on it.
// If the dataset already has a head it must be of the filesystem type;
// otherwise a fresh filesystem rooted at an empty, world-writable directory
// is created.
func start(dataset string, mount mount) {
	ds, err := spec.GetDataset(dataset)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not create dataset: %s\n", err)
		return
	}

	head, present := ds.MaybeHeadValue()
	if !present {
		// No head yet: synthesize an empty root directory. Mode 0777 so the
		// mount itself imposes no permission restrictions.
		attr := makeAttr(0777)
		dir := types.NewStructWithType(directoryType, types.ValueSlice{types.NewMap()})
		inode := types.NewStructWithType(inodeType, types.ValueSlice{attr, dir})
		head = types.NewStructWithType(fsType, types.ValueSlice{inode})
	} else if !types.IsSubtype(fsType, head.Type()) {
		fmt.Fprintf(os.Stderr, "Invalid dataset head: expected type '%s' but found type '%s'\n", fsType.Desc.(types.StructDesc).Name, head.Type().Desc.(types.StructDesc).Name)
		return
	}

	mount(&nomsFS{
		FileSystem: pathfs.NewDefaultFileSystem(),
		ds:         ds,
		head:       head.(types.Struct),
		mdLock:     &sync.Mutex{},
		nodes:      make(map[hash.Hash]*nNode),
	})
}
Example 2
0
// createStruct builds a test struct for index i: a bool (even/odd flag), the
// index as a number, and a fixed-width string padding the row to 55 bytes.
func createStruct(i uint64) types.Value {
	even := types.Bool(i%2 == 0)
	num := types.Number(i)
	str := types.String(fmt.Sprintf("i am a 55 bytes............................%12d", i))
	return types.NewStructWithType(structType, types.ValueSlice{even, num, str})
}
Example 3
0
// createCommon performs the bookkeeping shared by file, directory, and
// symlink creation: it resolves the parent directory from path, builds an
// inode holding attributes for mode plus the contents produced by
// createContents, splices the new node into the hierarchy, and commits.
func (fs *nomsFS) createCommon(path string, mode uint32, createContents func() types.Value) (*nNode, fuse.Status) {
	parts := strings.Split(path, "/")
	name, dirParts := parts[len(parts)-1], parts[:len(parts)-1]

	// Locate where in the hierarchy the new node will live.
	parent, status := fs.getPathComponents(dirParts)
	if status != fuse.OK {
		return nil, status
	}
	if nodeType(parent.inode) != "Directory" {
		return nil, fuse.ENOTDIR
	}

	// Assemble the inode and register the node under its parent.
	inode := types.NewStructWithType(inodeType, types.ValueSlice{makeAttr(mode), createContents()})
	node := fs.getNode(inode, name, parent)

	// Persist the updated hierarchy.
	fs.splice(node)
	fs.commit()

	return node, fuse.OK
}
Example 4
0
// createStruct builds a test struct for index i with a bool (even/odd flag),
// the index as a number, and a fixed-width 55-byte string field.
func createStruct(i uint64) types.Value {
	fields := map[string]types.Value{
		"bool": types.Bool(i%2 == 0),
		"num":  types.Number(i),
		"str":  types.String(fmt.Sprintf("i am a 55 bytes............................%12d", i)),
	}
	return types.NewStructWithType(structType, fields)
}
Example 5
0
// ReadToList reads every remaining row of the CSV reader into a streaming,
// typed List of structs named structName, one struct per row, with fields
// described by headers. If the original data contained a header row the
// caller must already have consumed it.
// If kinds is non-empty it supplies the field types; otherwise every field is
// left as a string.
// Along with the list, ReadToList returns the type of the structs it built.
func ReadToList(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, t *types.Type) {
	t, fieldOrder, kindMap := MakeStructTypeFromHeaders(headers, structName, kinds)
	valueChan := make(chan types.Value, 128) // TODO: Make this a function param?
	listChan := types.NewStreamingList(vrw, valueChan)

	for {
		row, err := r.Read()
		if err != nil {
			if err == io.EOF {
				break
			}
			panic(err)
		}

		fields := make(types.ValueSlice, len(headers))
		for i, cell := range row {
			if i >= len(headers) {
				continue // ignore columns beyond the declared headers
			}
			idx := fieldOrder[i]
			val, convErr := StringToValue(cell, kindMap[idx])
			if convErr != nil {
				d.Chk.Fail(fmt.Sprintf("Error parsing value for column '%s': %s", headers[i], convErr))
			}
			fields[idx] = val
		}
		valueChan <- types.NewStructWithType(t, fields)
	}
	close(valueChan)

	return <-listChan, t
}
Example 6
0
// NewCommit returns an empty commit struct: an empty-string value and no
// parents.
func NewCommit() types.Struct {
	return types.NewStructWithType(commitType, map[string]types.Value{
		ValueField:   types.String(""),
		ParentsField: types.NewSet(),
	})
}
Example 7
0
// Symlink creates a symbolic link at path pointing at targetPath (mode 0755).
func (fs *nomsFS) Symlink(targetPath string, path string, context *fuse.Context) fuse.Status {
	fs.mdLock.Lock()
	defer fs.mdLock.Unlock()

	makeLink := func() types.Value {
		return types.NewStructWithType(symlinkType, types.ValueSlice{types.String(targetPath)})
	}
	_, code := fs.createCommon(path, 0755, makeLink)
	return code
}
Example 8
0
// Mkdir creates an empty directory node at path with the given mode.
func (fs *nomsFS) Mkdir(path string, mode uint32, context *fuse.Context) fuse.Status {
	fs.mdLock.Lock()
	defer fs.mdLock.Unlock()

	makeDir := func() types.Value {
		return types.NewStructWithType(directoryType, types.ValueSlice{types.NewMap()})
	}
	_, code := fs.createCommon(path, mode, makeDir)
	return code
}
Example 9
0
// FIXME: run with pipe
// TestCSVExporter commits a small list of structs to a LevelDB-backed
// dataset, runs the CSV exporter against it, and verifies the output:
// header row first, each data row in order, then EOF.
func (s *testSuite) TestCSVExporter() {
	setName := "csv"
	header := []string{"a", "b", "c"}
	// gofmt -s: the element type is elided inside the slice literal.
	payload := [][]string{
		{"5", "7", "100"},
		{"4", "10", "255"},
		{"512", "12", "55"},
	}
	structName := "SomeStruct"

	// Set up the data store.
	cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false)
	ds := dataset.NewDataset(datas.NewDatabase(cs), setName)

	// Build the struct type: every column is a string field.
	f := make(types.TypeMap, len(header))
	for _, key := range header {
		f[key] = types.StringType
	}
	typ := types.MakeStructType(structName, f)

	// Build one struct per payload row.
	structs := make([]types.Value, len(payload))
	for i, row := range payload {
		fields := make(map[string]types.Value, len(row))
		for j, v := range row {
			fields[header[j]] = types.String(v)
		}
		structs[i] = types.NewStructWithType(typ, fields)
	}

	ds.Commit(types.NewList(structs...))
	ds.Database().Close()

	// Run the exporter.
	dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName)
	out := s.Run(main, []string{dataspec})

	// Verify output.
	csvReader := csv.NewReader(strings.NewReader(out))

	row, err := csvReader.Read()
	d.Chk.NoError(err)
	s.Equal(header, row)

	for _, expected := range payload {
		row, err := csvReader.Read()
		d.Chk.NoError(err)
		s.Equal(expected, row)
	}

	_, err = csvReader.Read()
	s.Equal(io.EOF, err)
}
Example 10
0
// makeAttr builds a fresh attribute struct for a new node: creation and
// modification times both set to now (seconds with fractional nanoseconds),
// ownership taken from the calling FUSE user, and the given mode.
// NOTE(review): the trailing empty map and the slice order must match
// attrType's field declaration — confirm against the type definition.
func makeAttr(mode uint32) types.Struct {
	now := time.Now()
	stamp := types.Number(float64(now.Unix()) + float64(now.Nanosecond())/1000000000)

	owner := fuse.CurrentOwner()
	gid := types.Number(float64(owner.Gid))
	uid := types.Number(float64(owner.Uid))

	return types.NewStructWithType(attrType, types.ValueSlice{stamp, gid, types.Number(mode), stamp, uid, types.NewMap()})
}
Example 11
0
// ReadToMap reads the remaining rows of the CSV reader into a streaming Map.
// The column at pkIdx becomes the map key; the remaining columns become
// fields of a struct value shaped by headersRaw (minus the key column).
// If kinds is non-empty it supplies the column types (key column included);
// otherwise every value is parsed as a string.
func ReadToMap(r *csv.Reader, headersRaw []string, pkIdx int, kinds KindSlice, vrw types.ValueReadWriter) types.Map {
	headers := make([]string, 0, len(headersRaw)-1)
	for i, h := range headersRaw {
		if i != pkIdx {
			headers = append(headers, h)
		}
	}

	var pkKind types.NomsKind
	if len(kinds) == 0 {
		pkKind = types.StringKind
	} else {
		pkKind = kinds[pkIdx]
		// Copy into a fresh slice rather than append(kinds[:pkIdx], ...):
		// the in-place append shifts elements inside the caller's backing
		// array and silently corrupts the caller's kinds slice.
		trimmed := make(KindSlice, 0, len(kinds)-1)
		trimmed = append(trimmed, kinds[:pkIdx]...)
		trimmed = append(trimmed, kinds[pkIdx+1:]...)
		kinds = trimmed
	}

	t, fieldOrder, kindMap := MakeStructTypeFromHeaders(headers, "", kinds)

	kvChan := make(chan types.Value, 128)
	mapChan := types.NewStreamingMap(vrw, kvChan)
	for {
		row, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}

		fieldIndex := 0
		var pk types.Value
		fields := make(types.ValueSlice, len(headers))
		for x, v := range row {
			if x == pkIdx {
				pk, err = StringToValue(v, pkKind)
			} else if fieldIndex < len(headers) {
				fieldOrigIndex := fieldOrder[fieldIndex]
				fields[fieldOrigIndex], err = StringToValue(v, kindMap[fieldOrigIndex])
				fieldIndex++
			}
			if err != nil {
				d.Chk.Fail(fmt.Sprintf("Error parsing value for column '%s': %s", headers[x], err))
			}
		}
		kvChan <- pk
		kvChan <- types.NewStructWithType(t, fields)
	}

	close(kvChan)
	return <-mapChan
}
Example 12
0
// ReadToMap reads the remaining rows of the CSV reader into a Map keyed by
// the column at pkIdx; the other columns become fields of a struct value.
// Header names are escaped into valid struct field names. If kinds is
// non-empty it supplies the column types (key column included); otherwise
// every value is parsed as a string.
func ReadToMap(r *csv.Reader, headers_raw []string, pkIdx int, kinds KindSlice, vrw types.ValueReadWriter) (m types.Map) {
	headers := make([]string, 0, len(headers_raw)-1)
	for i, h := range headers_raw {
		if i != pkIdx {
			headers = append(headers, types.EscapeStructField(h))
		}
	}

	var pkKind types.NomsKind
	if len(kinds) == 0 {
		pkKind = types.StringKind
	} else {
		pkKind = kinds[pkIdx]
		// Build a fresh slice instead of append(kinds[:pkIdx], ...): the
		// in-place append shifts elements inside the caller's backing array
		// and silently corrupts the caller's kinds slice.
		trimmed := make(KindSlice, 0, len(kinds)-1)
		trimmed = append(trimmed, kinds[:pkIdx]...)
		trimmed = append(trimmed, kinds[pkIdx+1:]...)
		kinds = trimmed
	}

	t := MakeStructTypeFromHeaders(headers, "", kinds)
	kindMap := make(map[string]types.NomsKind, len(headers))
	t.Desc.(types.StructDesc).IterFields(func(name string, t *types.Type) {
		kindMap[name] = t.Kind()
	})

	m = types.NewMap()
	for {
		row, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}

		// Fresh per-row state: reusing one fields map (or the previous pk)
		// across iterations would leak stale values into short rows.
		fields := make(map[string]types.Value, len(headers))
		var pk types.Value
		fieldIndex := 0
		for x, v := range row {
			if x == pkIdx {
				pk = StringToType(v, pkKind)
			} else if fieldIndex < len(headers) {
				name := headers[fieldIndex]
				fields[name] = StringToType(v, kindMap[name])
				fieldIndex++
			}
		}
		m = m.Set(pk, types.NewStructWithType(t, fields))
	}
	return
}
Example 13
0
// Create makes a new regular file at path, backed by an empty blob written
// to the dataset's database, and returns an open handle to it.
func (fs *nomsFS) Create(path string, flags uint32, mode uint32, context *fuse.Context) (nodefs.File, fuse.Status) {
	fs.mdLock.Lock()
	defer fs.mdLock.Unlock()

	makeFile := func() types.Value {
		empty := types.NewEmptyBlob()
		return types.NewStructWithType(fileType, types.ValueSlice{fs.ds.Database().WriteValue(empty)})
	}
	node, code := fs.createCommon(path, mode, makeFile)
	if code != fuse.OK {
		return nil, code
	}

	return nomsFile{
		File: nodefs.NewDefaultFile(),
		fs:   fs,
		node: node,
	}, fuse.OK
}
Example 14
0
// ReadToList reads every remaining row of the CSV reader into a streaming,
// typed List of structs named structName, one struct per row, with fields
// derived from headers_raw (escaped into valid field names). If the original
// data contained a header row the caller must already have consumed it.
// If kinds is non-empty it types the generated fields; otherwise all fields
// are left as strings.
// Besides the list, ReadToList returns the type of the structs it produced.
func ReadToList(r *csv.Reader, structName string, headers_raw []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, t *types.Type) {
	headers := make([]string, len(headers_raw))
	for i, raw := range headers_raw {
		headers[i] = types.EscapeStructField(raw)
	}

	t = MakeStructTypeFromHeaders(headers, structName, kinds)
	valueChan := make(chan types.Value, 128) // TODO: Make this a function param?
	listChan := types.NewStreamingList(vrw, valueChan)

	// Map each escaped field name to its kind for value conversion below.
	kindMap := make(map[string]types.NomsKind, len(headers))
	t.Desc.(types.StructDesc).IterFields(func(name string, ft *types.Type) {
		kindMap[name] = ft.Kind()
	})

	for {
		row, err := r.Read()
		if err != nil {
			if err == io.EOF {
				break
			}
			panic(err)
		}

		fields := make(map[string]types.Value, len(headers))
		for i, cell := range row {
			if i < len(headers) {
				name := headers[i]
				fields[name] = StringToType(cell, kindMap[name])
			}
		}
		valueChan <- types.NewStructWithType(t, fields)
	}
	close(valueChan)

	return <-listChan, t
}
Example 15
0
// NewCommit creates a new commit object. The type of Commit is computed based on the type of the value, the type of the meta info as well as the type of the parents.
//
// For the first commit we get:
//
// ```
// struct Commit {
//   meta: M,
//   parents: Set<Ref<Cycle<0>>>,
//   value: T,
// }
// ```
//
// As long as we continue to commit values with type T and meta of type M that type stays the same.
//
// When we later do a commit with value of type U and meta of type N we get:
//
// ```
// struct Commit {
//   meta: N,
//   parents: Set<Ref<struct Commit {
//     meta: M | N,
//     parents: Set<Ref<Cycle<0>>>,
//     value: T | U
//   }>>,
//   value: U,
// }
// ```
//
// Similarly if we do a commit with a different type for the meta info.
//
// The new type gets combined as a union type for the value/meta of the inner commit struct.
func NewCommit(value types.Value, parents types.Set, meta types.Struct) types.Struct {
	// Union the value/meta types seen across the parents into the new type.
	parentValueTypes := valueTypesFromParents(parents, ValueField)
	parentMetaTypes := valueTypesFromParents(parents, MetaField)
	commitType := makeCommitType(value.Type(), parentValueTypes, meta.Type(), parentMetaTypes)
	return types.NewStructWithType(commitType, types.ValueSlice{meta, parents, value})
}