Example #1
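// runDiff resolves both object arguments with spec.GetPath, verifies each
// value exists, then prints either a summary or a full diff through a pager.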
func runDiff(args []string) int {
	db1, value1, err := spec.GetPath(args[0])
	d.CheckErrorNoUsage(err)
	if value1 == nil {
		d.CheckErrorNoUsage(fmt.Errorf("Object not found: %s", args[0]))
	}
	defer db1.Close()

	db2, value2, err := spec.GetPath(args[1])
	d.CheckErrorNoUsage(err)
	if value2 == nil {
		d.CheckErrorNoUsage(fmt.Errorf("Object not found: %s", args[1]))
	}
	defer db2.Close()

	if summarize {
		diff.Summary(value1, value2)
		return 0
	}

	pgr := outputpager.Start()
	defer pgr.Stop()

	diff.Diff(pgr.Writer, value1, value2)
	return 0
}
Example #2
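// TestImportFromFile imports a temporary file into a dataset, then uses
// spec.GetPath to verify the committed blob and its "date"/"file" meta fields.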
func (s *testSuite) TestImportFromFile() {
	assert := s.Assert()

	f, err := ioutil.TempFile("", "TestImportFromFile")
	assert.NoError(err)

	f.Write([]byte("abcdef"))
	f.Close()

	dsName := spec.CreateValueSpecString("ldb", s.LdbDir, "ds")
	s.Run(main, []string{f.Name(), dsName})

	db, blob, err := spec.GetPath(dsName + ".value")
	assert.NoError(err)

	expected := types.NewBlob(bytes.NewBufferString("abcdef"))
	assert.True(expected.Equals(blob))

	meta := db.Head("ds").Get(datas.MetaField).(types.Struct)
	metaDesc := meta.Type().Desc.(types.StructDesc)
	assert.Equal(2, metaDesc.Len())
	assert.NotNil(metaDesc.Field("date"))
	assert.Equal(f.Name(), string(meta.Get("file").(types.String)))

	db.Close()
}
Example #3
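// TestImportFromStdin pipes bytes through os.Stdin, imports them with --stdin,
// then uses spec.GetPath to verify the blob and that meta has only a "date" field.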
func (s *testSuite) TestImportFromStdin() {
	assert := s.Assert()

	oldStdin := os.Stdin
	newStdin, blobOut, err := os.Pipe()
	assert.NoError(err)

	os.Stdin = newStdin
	defer func() {
		os.Stdin = oldStdin
	}()

	go func() {
		blobOut.Write([]byte("abcdef"))
		blobOut.Close()
	}()

	dsName := spec.CreateValueSpecString("ldb", s.LdbDir, "ds")
	// Run() will return when blobOut is closed.
	s.Run(main, []string{"--stdin", dsName})

	db, blob, err := spec.GetPath(dsName + ".value")
	assert.NoError(err)

	expected := types.NewBlob(bytes.NewBufferString("abcdef"))
	assert.True(expected.Equals(blob))

	meta := db.Head("ds").Get(datas.MetaField).(types.Struct)
	// The meta should only have a "date" field.
	metaDesc := meta.Type().Desc.(types.StructDesc)
	assert.Equal(1, metaDesc.Len())
	assert.NotNil(metaDesc.Field("date"))

	db.Close()
}
Example #4
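// main implements `noms show`: it resolves the single object argument with
// spec.GetPath and writes its encoded value to stdout, optionally paged.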
func main() {
	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Shows a serialization of a Noms object\n")
		fmt.Fprintln(os.Stderr, "Usage: noms show <object>\n")
		flag.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nSee \"Spelling Objects\" at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.\n\n")
	}

	flag.Parse()
	if *showHelp {
		flag.Usage()
		return
	}

	if len(flag.Args()) != 1 {
		util.CheckError(errors.New("expected exactly one argument"))
	}

	database, value, err := spec.GetPath(flag.Arg(0))
	util.CheckError(err)

	waitChan := outputpager.PageOutput(!*outputpager.NoPager)

	types.WriteEncodedValueWithTags(os.Stdout, value)
	fmt.Fprintf(os.Stdout, "\n")
	database.Close()

	if waitChan != nil {
		os.Stdout.Close()
		<-waitChan
	}
}
Example #5
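// main implements `noms log`: it resolves the commit spec with spec.GetPath,
// verifies it references a Commit, and prints the history, optionally paged.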
func main() {
	color = flag.Int("color", -1, "value of 1 forces color on, 2 forces color off")
	maxLines = flag.Int("max-lines", 10, "max number of lines to show per commit (-1 for all lines)")
	maxCommits = flag.Int("n", 0, "max number of commits to display (0 for all commits)")
	showHelp = flag.Bool("help", false, "show help text")
	showGraph = flag.Bool("graph", false, "show ascii-based commit hierarchy on left side of output")
	showValue = flag.Bool("show-value", false, "show commit value rather than diff information -- this is temporary")

	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Displays the history of a Noms dataset\n")
		fmt.Fprintln(os.Stderr, "Usage: noms log <commitObject>")
		fmt.Fprintln(os.Stderr, "commitObject must be a dataset or object spec that refers to a commit.")
		flag.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nSee \"Spelling Objects\" at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.\n\n")
	}

	flag.Parse()
	if *showHelp {
		flag.Usage()
		return
	}

	if len(flag.Args()) != 1 {
		util.CheckError(errors.New("expected exactly one argument"))
	}

	useColor = shouldUseColor()

	database, value, err := spec.GetPath(flag.Arg(0))
	if err != nil {
		util.CheckErrorNoUsage(err)
	}
	defer database.Close()

	waitChan := outputpager.PageOutput(!*outputpager.NoPager)

	origCommit, ok := value.(types.Struct)
	if !ok || !origCommit.Type().Equals(datas.CommitType()) {
		util.CheckError(fmt.Errorf("%s does not reference a Commit object", flag.Arg(0)))
	}

	iter := NewCommitIterator(database, origCommit)
	displayed := 0
	if *maxCommits <= 0 {
		*maxCommits = math.MaxInt32
	}
	for ln, ok := iter.Next(); ok && displayed < *maxCommits; ln, ok = iter.Next() {
		if printCommit(ln, database) != nil {
			break
		}
		displayed++
	}

	if waitChan != nil {
		os.Stdout.Close()
		<-waitChan
	}
}
Example #6
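// main implements `noms diff`: it resolves both object specs with spec.GetPath
// and writes their diff to stdout, optionally paged.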
func main() {
	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Shows the difference between two objects\n")
		fmt.Fprintln(os.Stderr, "Usage: noms diff <object1> <object2>\n")
		flag.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nSee \"Spelling Objects\" at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.\n\n")
	}

	flag.Parse()
	if *showHelp {
		flag.Usage()
		return
	}

	if len(flag.Args()) != 2 {
		util.CheckError(errors.New("expected exactly two arguments"))
	}

	db1, value1, err := spec.GetPath(flag.Arg(0))
	util.CheckError(err)
	defer db1.Close()

	db2, value2, err := spec.GetPath(flag.Arg(1))
	util.CheckError(err)
	defer db2.Close()

	waitChan := outputpager.PageOutput(!*outputpager.NoPager)

	diff.Diff(os.Stdout, value1, value2)
	fmt.Fprintf(os.Stdout, "\n")

	if waitChan != nil {
		os.Stdout.Close()
		<-waitChan
	}
}
Example #7
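// main resolves a blob via spec.GetPath and copies its contents to a local
// file, printing progress as the bytes are written.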
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s <dataset> <file>\n", os.Args[0])
		flag.PrintDefaults()
	}

	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Parse(true)

	if len(flag.Args()) != 2 {
		d.CheckError(errors.New("expected dataset and file flags"))
	}

	var blob types.Blob
	path := flag.Arg(0)
	if db, val, err := spec.GetPath(path); err != nil {
		d.CheckErrorNoUsage(err)
	} else if val == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at %s", path))
	} else if b, ok := val.(types.Blob); !ok {
		d.CheckErrorNoUsage(fmt.Errorf("Value at %s is not a blob", path))
	} else {
		defer db.Close()
		blob = b
	}

	filePath := flag.Arg(1)
	if filePath == "" {
		d.CheckErrorNoUsage(errors.New("Empty file path"))
	}

	// Note: overwrites any existing file.
	file, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE, 0644)
	d.CheckErrorNoUsage(err)
	defer file.Close()

	expected := humanize.Bytes(blob.Len())
	start := time.Now()

	progReader := progressreader.New(blob.Reader(), func(seen uint64) {
		elapsed := time.Since(start).Seconds()
		rate := uint64(float64(seen) / elapsed)
		status.Printf("%s of %s written in %ds (%s/s)...", humanize.Bytes(seen), expected, int(elapsed), humanize.Bytes(rate))
	})

	io.Copy(file, progReader)
	status.Done()
}
Example #8
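// runLog resolves the commit spec with spec.GetPath, walks the commit graph,
// renders each commit in parallel, and streams the result to a pager.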
func runLog(args []string) int {
	useColor = shouldUseColor()

	database, value, err := spec.GetPath(args[0])
	if err != nil {
		d.CheckErrorNoUsage(err)
	}
	defer database.Close()

	if value == nil {
		d.CheckErrorNoUsage(fmt.Errorf("Object not found: %s", args[0]))
	}

	origCommit, ok := value.(types.Struct)
	if !ok || !datas.IsCommitType(origCommit.Type()) {
		d.CheckError(fmt.Errorf("%s does not reference a Commit object", args[0]))
	}

	iter := NewCommitIterator(database, origCommit)
	displayed := 0
	if maxCommits <= 0 {
		maxCommits = math.MaxInt32
	}

	inChan := make(chan interface{}, parallelism)
	outChan := orderedparallel.New(inChan, func(node interface{}) interface{} {
		buff := &bytes.Buffer{}
		printCommit(node.(LogNode), buff, database)
		return buff.Bytes()
	}, parallelism)

	go func() {
		for ln, ok := iter.Next(); ok && displayed < maxCommits; ln, ok = iter.Next() {
			inChan <- ln
			displayed++
		}
		close(inChan)
	}()

	pgr := outputpager.Start()
	defer pgr.Stop()

	for commitBuff := range outChan {
		io.Copy(pgr.Writer, bytes.NewReader(commitBuff.([]byte)))
	}
	return 0
}
Example #9
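// main implements `noms sync`: it resolves the source object with spec.GetPath
// and pulls it into the destination dataset.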
func main() {
	cpuCount := runtime.NumCPU()
	runtime.GOMAXPROCS(cpuCount)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Moves datasets between or within databases\n\n")
		fmt.Fprintf(os.Stderr, "noms sync [options] <source-object> <dest-dataset>\n\n")
		flag.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nFor detailed information on spelling objects and datasets, see: at https://github.com/attic-labs/noms/blob/master/doc/spelling.md.\n\n")
	}

	spec.RegisterDatabaseFlags()
	flag.Parse()

	if flag.NArg() != 2 {
		util.CheckError(errors.New("expected a source object and destination dataset"))
	}

	sourceStore, sourceObj, err := spec.GetPath(flag.Arg(0))
	util.CheckError(err)
	defer sourceStore.Close()

	sinkDataset, err := spec.GetDataset(flag.Arg(1))
	util.CheckError(err)
	defer sinkDataset.Database().Close()

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()

		var err error
		sinkDataset, err = sinkDataset.Pull(sourceStore, types.NewRef(sourceObj), int(*p))
		d.Exp.NoError(err)
	})

	if err != nil {
		log.Fatal(err)
	}
}
Example #10
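// main implements csv-import: it reads CSV data from a local file or from a
// Noms blob resolved via spec.GetPath, builds a list or map, and commits it
// to the target dataset.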
func main() {
	// Actually the delimiter uses runes, which can be multiple characters long.
	// https://blog.golang.org/strings
	delimiter := flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	header := flag.String("header", "", "header row. If empty, we'll use the first row of the file")
	name := flag.String("name", "Row", "struct name. The user-visible name to give to the struct type that will hold each row of data.")
	columnTypes := flag.String("column-types", "", "a comma-separated list of types representing the desired type of each column. If absent, all types default to String")
	pathDescription := "noms path to blob to import"
	path := flag.String("path", "", pathDescription)
	flag.StringVar(path, "p", "", pathDescription)
	dateFlag := flag.String("date", "", fmt.Sprintf(`date of commit in ISO 8601 format ("%s"). By default, the current date is used.`, dateFormat))
	noProgress := flag.Bool("no-progress", false, "prevents progress from being output if true")
	destType := flag.String("dest-type", "list", "the destination type to import to. can be 'list' or 'map:<pk>', where <pk> is the index position (0-based) of the column that is the unique identifier for the column")
	skipRecords := flag.Uint("skip-records", 0, "number of records to skip at beginning of file")
	destTypePattern := regexp.MustCompile("^(list|map):(\\d+)$")

	spec.RegisterDatabaseFlags(flag.CommandLine)
	profile.RegisterProfileFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: csv-import [options] <csvfile> <dataset>\n\n")
		flag.PrintDefaults()
	}

	flag.Parse(true)

	var err error
	switch {
	case flag.NArg() == 0:
		err = errors.New("Maybe you put options after the dataset?")
	case flag.NArg() == 1 && *path == "":
		err = errors.New("If <csvfile> isn't specified, you must specify a noms path with -p")
	case flag.NArg() == 2 && *path != "":
		err = errors.New("Cannot specify both <csvfile> and a noms path with -p")
	case flag.NArg() > 2:
		err = errors.New("Too many arguments")
	}
	d.CheckError(err)

	var date = *dateFlag
	if date == "" {
		date = time.Now().UTC().Format(dateFormat)
	} else {
		_, err := time.Parse(dateFormat, date)
		d.CheckErrorNoUsage(err)
	}

	defer profile.MaybeStartProfile().Stop()

	var r io.Reader
	var size uint64
	var filePath string
	var dataSetArgN int

	if *path != "" {
		db, val, err := spec.GetPath(*path)
		d.CheckError(err)
		if val == nil {
			d.CheckError(fmt.Errorf("Path %s not found\n", *path))
		}
		blob, ok := val.(types.Blob)
		if !ok {
			d.CheckError(fmt.Errorf("Path %s not a Blob: %s\n", *path, types.EncodedValue(val.Type())))
		}
		defer db.Close()
		r = blob.Reader()
		size = blob.Len()
		dataSetArgN = 0
	} else {
		filePath = flag.Arg(0)
		res, err := os.Open(filePath)
		d.CheckError(err)
		defer res.Close()
		fi, err := res.Stat()
		d.CheckError(err)
		r = res
		size = uint64(fi.Size())
		dataSetArgN = 1
	}

	if !*noProgress {
		r = progressreader.New(r, getStatusPrinter(size))
	}

	comma, err := csv.StringToRune(*delimiter)
	d.CheckErrorNoUsage(err)

	var dest int
	var pk int
	if *destType == "list" {
		dest = destList
	} else if match := destTypePattern.FindStringSubmatch(*destType); match != nil {
		dest = destMap
		pk, err = strconv.Atoi(match[2])
		d.CheckErrorNoUsage(err)
	} else {
		fmt.Println("Invalid dest-type: ", *destType)
		return
	}

	cr := csv.NewCSVReader(r, comma)
	for i := uint(0); i < *skipRecords; i++ {
		cr.Read()
	}

	var headers []string
	if *header == "" {
		headers, err = cr.Read()
		d.PanicIfError(err)
	} else {
		headers = strings.Split(*header, string(comma))
	}

	ds, err := spec.GetDataset(flag.Arg(dataSetArgN))
	d.CheckError(err)
	defer ds.Database().Close()

	kinds := []types.NomsKind{}
	if *columnTypes != "" {
		kinds = csv.StringsToKinds(strings.Split(*columnTypes, ","))
	}

	var value types.Value
	if dest == destList {
		value, _ = csv.ReadToList(cr, *name, headers, kinds, ds.Database())
	} else {
		value = csv.ReadToMap(cr, headers, pk, kinds, ds.Database())
	}
	mi := metaInfoForCommit(date, filePath, *path, *comment)
	_, err = ds.Commit(value, dataset.CommitOptions{Meta: mi})
	if !*noProgress {
		status.Clear()
	}
	d.PanicIfError(err)
}