Example #1
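// TestNomsBlobDiff checks that diffing a 2 kB blob against an 11-byte blob
// prints the expected size summary for each side.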
func TestNomsBlobDiff(t *testing.T) {
	assert := assert.New(t)

	expected := "-   Blob (2.0 kB)\n+   Blob (11 B)\n"
	b1 := types.NewBlob(strings.NewReader(strings.Repeat("x", 2*1024)))
	b2 := types.NewBlob(strings.NewReader("Hello World"))
	buf := util.NewBuffer(nil)
	Diff(buf, b1, b2)
	assert.Equal(expected, buf.String())
}
Example #2
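// TestImportFromFile writes a temporary file, imports it into a dataset, and
// verifies both the resulting blob and the commit metadata ("date" and "file" fields).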
func (s *testSuite) TestImportFromFile() {
	assert := s.Assert()

	f, err := ioutil.TempFile("", "TestImportFromFile")
	assert.NoError(err)

	f.Write([]byte("abcdef"))
	f.Close()

	dsName := spec.CreateValueSpecString("ldb", s.LdbDir, "ds")
	s.Run(main, []string{f.Name(), dsName})

	db, blob, err := spec.GetPath(dsName + ".value")
	assert.NoError(err)

	expected := types.NewBlob(bytes.NewBufferString("abcdef"))
	assert.True(expected.Equals(blob))

	meta := db.Head("ds").Get(datas.MetaField).(types.Struct)
	metaDesc := meta.Type().Desc.(types.StructDesc)
	assert.Equal(2, metaDesc.Len())
	assert.NotNil(metaDesc.Field("date"))
	assert.Equal(f.Name(), string(meta.Get("file").(types.String)))

	db.Close()
}
Example #3
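// TestImportFromStdin redirects os.Stdin to a pipe, imports the piped bytes via
// --stdin, and verifies the blob plus that the commit metadata carries only a "date" field.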
func (s *testSuite) TestImportFromStdin() {
	assert := s.Assert()

	oldStdin := os.Stdin
	newStdin, blobOut, err := os.Pipe()
	assert.NoError(err)

	os.Stdin = newStdin
	defer func() {
		os.Stdin = oldStdin
	}()

	go func() {
		blobOut.Write([]byte("abcdef"))
		blobOut.Close()
	}()

	dsName := spec.CreateValueSpecString("ldb", s.LdbDir, "ds")
	// Run() will return when blobOut is closed.
	s.Run(main, []string{"--stdin", dsName})

	db, blob, err := spec.GetPath(dsName + ".value")
	assert.NoError(err)

	expected := types.NewBlob(bytes.NewBufferString("abcdef"))
	assert.True(expected.Equals(blob))

	meta := db.Head("ds").Get(datas.MetaField).(types.Struct)
	// The meta should only have a "date" field.
	metaDesc := meta.Type().Desc.(types.StructDesc)
	assert.Equal(1, metaDesc.Len())
	assert.NotNil(metaDesc.Field("date"))

	db.Close()
}
Example #4
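// TestCSVImporterFromBlob commits raw CSV bytes as a blob, runs the importer
// against that blob via --path (and its -p alias), and validates the imported list.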
func (s *testSuite) TestCSVImporterFromBlob() {
	test := func(pathFlag string) {
		defer os.RemoveAll(s.LdbDir)

		newDB := func() datas.Database {
			cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false)
			return datas.NewDatabase(cs)
		}

		db := newDB()
		rawDS := dataset.NewDataset(db, "raw")
		csv := &bytes.Buffer{}
		writeCSV(csv)
		rawDS.CommitValue(types.NewBlob(csv))
		db.Close()

		stdout, stderr := s.Run(main, []string{
			"--no-progress", "--column-types", "String,Number",
			pathFlag, spec.CreateValueSpecString("ldb", s.LdbDir, "raw.value"),
			spec.CreateValueSpecString("ldb", s.LdbDir, "csv"),
		})
		s.Equal("", stdout)
		s.Equal("", stderr)

		db = newDB()
		defer db.Close()
		csvDS := dataset.NewDataset(db, "csv")
		validateCSV(s, csvDS.HeadValue().(types.List))
	}
	test("--path")
	test("-p")
}
Example #5
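// TestBlobGet writes a blob to a database, runs main with a hash spec and an
// output path, and verifies the written file round-trips the original bytes.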
func (s *bgSuite) TestBlobGet() {
	blobBytes := []byte("hello")
	blob := types.NewBlob(bytes.NewBuffer(blobBytes))

	db, err := spec.GetDatabase(s.TempDir)
	s.NoError(err)
	hash := db.WriteValue(blob)
	db.Close()

	hashSpec := fmt.Sprintf("%s::#%s", s.TempDir, hash.TargetHash().String())
	filePath := filepath.Join(s.TempDir, "out")
	s.Run(main, []string{hashSpec, filePath})

	fileBytes, err := ioutil.ReadFile(filePath)
	s.NoError(err)
	s.Equal(blobBytes, fileBytes)
}
Example #6
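// SetXAttr stores an extended attribute on the inode at path, wrapping the
// attribute bytes in a blob and committing the updated metadata.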
func (fs *nomsFS) SetXAttr(path string, key string, data []byte, flags int, context *fuse.Context) fuse.Status {
	fs.mdLock.Lock()
	defer fs.mdLock.Unlock()
	np, code := fs.getPath(path)
	if code != fuse.OK {
		return code
	}

	inode := np.inode
	attr := np.inode.Get("attr").(types.Struct)
	xattr := attr.Get("xattr").(types.Map)
	blob := types.NewBlob(bytes.NewReader(data))

	xattr = xattr.Set(types.String(key), blob)
	attr = attr.Set("xattr", xattr)
	inode = inode.Set("attr", attr)

	fs.updateNode(np, inode)
	fs.splice(np)
	fs.commit()

	return fuse.OK
}
Example #7
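// main benchmarks building, scanning, and incrementally inserting into List,
// Set, and Map collections, then measures blob build and scan throughput.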
func main() {
	profile.RegisterProfileFlags(flag.CommandLine)
	flag.Parse(true)

	buildCount := *count
	insertCount := buildCount / 50
	defer profile.MaybeStartProfile().Stop()

	collectionTypes := []string{"List", "Set", "Map"}
	buildFns := []buildCollectionFn{buildList, buildSet, buildMap}
	buildIncrFns := []buildCollectionFn{buildListIncrementally, buildSetIncrementally, buildMapIncrementally}
	readFns := []readCollectionFn{readList, readSet, readMap}

	elementTypes := []string{"numbers (8 B)", "strings (32 B)", "structs (64 B)"}
	elementSizes := []uint64{numberSize, stringSize, structSize}
	valueFns := []createValueFn{createNumber, createString, createStruct}

	for i, colType := range collectionTypes {
		fmt.Printf("Testing %s: \t\tbuild %d\t\t\tscan %d\t\t\tinsert %d\n", colType, buildCount, buildCount, insertCount)

		for j, elementType := range elementTypes {
			valueFn := valueFns[j]

			// Build One-Time
			ms := chunks.NewMemoryStore()
			ds := dataset.NewDataset(datas.NewDatabase(ms), "test")
			t1 := time.Now()
			col := buildFns[i](buildCount, valueFn)
			ds, err := ds.CommitValue(col)
			d.Chk.NoError(err)
			buildDuration := time.Since(t1)

			// Read
			t1 = time.Now()
			col = ds.HeadValue().(types.Collection)
			readFns[i](col)
			readDuration := time.Since(t1)

			// Build Incrementally
			ms = chunks.NewMemoryStore()
			ds = dataset.NewDataset(datas.NewDatabase(ms), "test")
			t1 = time.Now()
			col = buildIncrFns[i](insertCount, valueFn)
			ds, err = ds.CommitValue(col)
			d.Chk.NoError(err)
			incrDuration := time.Since(t1)

			elementSize := elementSizes[j]
			buildSize := elementSize * buildCount
			incrSize := elementSize * insertCount

			fmt.Printf("%s\t\t%s\t\t%s\t\t%s\n", elementType, rate(buildDuration, buildSize), rate(readDuration, buildSize), rate(incrDuration, incrSize))
		}
		fmt.Println()
	}

	fmt.Printf("Testing Blob: \t\tbuild %d MB\t\t\tscan %d MB\n", *blobSize/1000000, *blobSize/1000000)

	ms := chunks.NewMemoryStore()
	ds := dataset.NewDataset(datas.NewDatabase(ms), "test")

	blobBytes := makeBlobBytes(*blobSize)
	t1 := time.Now()
	blob := types.NewBlob(bytes.NewReader(blobBytes))
	_, err := ds.CommitValue(blob)
	d.Chk.NoError(err)
	buildDuration := time.Since(t1)

	ds = dataset.NewDataset(datas.NewDatabase(ms), "test")
	t1 = time.Now()
	blob = ds.HeadValue().(types.Blob)
	outBytes, err := ioutil.ReadAll(blob.Reader())
	readDuration := time.Since(t1)
	d.Chk.NoError(err)
	d.Chk.True(bytes.Equal(blobBytes, outBytes))
	fmt.Printf("\t\t\t%s\t\t%s\n\n", rate(buildDuration, *blobSize), rate(readDuration, *blobSize))
}
Example #8
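// main fetches an HTTP URL or a local file into a noms blob, reporting
// progress as it reads, and commits the blob to the given dataset.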
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Fetches a URL into a noms blob\n\nUsage: %s <dataset> <url>:\n", os.Args[0])
		flag.PrintDefaults()
	}

	spec.RegisterDatabaseFlags()
	flag.Parse()

	if flag.NArg() != 2 {
		util.CheckError(errors.New("expected dataset and url arguments"))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	util.CheckError(err)
	defer ds.Database().Close()

	url := flag.Arg(1)
	start = time.Now()

	var pr io.Reader

	if strings.HasPrefix(url, "http") {
		resp, err := http.Get(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %s\n", url, err)
			return
		}

		switch resp.StatusCode / 100 {
		case 4, 5:
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %d (%s)\n", url, resp.StatusCode, resp.Status)
			return
		}

		pr = progressreader.New(resp.Body, getStatusPrinter(resp.ContentLength))
	} else {
		// assume it's a file
		f, err := os.Open(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Invalid URL %s - does not start with 'http' and isn't local file either. fopen error: %s", url, err)
			return
		}

		s, err := f.Stat()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not stat file %s: %s", url, err)
			return
		}

		pr = progressreader.New(f, getStatusPrinter(s.Size()))
	}

	b := types.NewBlob(pr)
	ds, err = ds.Commit(b)
	if err != nil {
		d.Chk.True(datas.ErrMergeNeeded == err)
		fmt.Fprintf(os.Stderr, "Could not commit, optimistic concurrency failed.")
		return
	}

	status.Done()
	fmt.Println("Done")
}