Example 1
func main() {
	chunks.RegisterLevelDBFlags(flag.CommandLine)
	dynFlags := chunks.DynamoFlags("")

	flag.Usage = usage
	flag.Parse(true)

	if *portFlag == 0 || *authKeyFlag == "" {
		usage()
		return
	}

	var factory chunks.Factory
	if factory = dynFlags.CreateFactory(); factory != nil {
		fmt.Printf("Using dynamo ...\n")
	} else if *ldbDir != "" {
		factory = chunks.NewLevelDBStoreFactoryUseFlags(*ldbDir)
		fmt.Printf("Using leveldb ...\n")
	} else {
		factory = chunks.NewMemoryStoreFactory()
		fmt.Printf("Using mem ...\n")
	}
	factory = &cachingReadThroughStoreFactory{chunks.NewMemoryStore(), factory}
	defer factory.Shutter()

	startWebServer(factory, *authKeyFlag)
}
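The cachingReadThroughStoreFactory wrapper used on the second-to-last line is defined elsewhere; the name suggests a read-through cache layered over whichever backing store was selected. A minimal sketch of that pattern over a hypothetical Get/Put interface (the real chunks.ChunkStore API is richer):

// Hypothetical minimal store interface; the real chunks.ChunkStore has more methods.
type store interface {
	Get(key string) ([]byte, bool)
	Put(key string, val []byte)
}

// cachingReadThroughStore serves reads from a fast in-memory cache and falls
// back to the backing store, populating the cache on a miss.
type cachingReadThroughStore struct {
	cache, backing store
}

func (s *cachingReadThroughStore) Get(key string) ([]byte, bool) {
	if v, ok := s.cache.Get(key); ok {
		return v, true // cache hit
	}
	v, ok := s.backing.Get(key)
	if ok {
		s.cache.Put(key, v) // remember for next time
	}
	return v, ok
}

func (s *cachingReadThroughStore) Put(key string, val []byte) {
	s.cache.Put(key, val)
	s.backing.Put(key, val)
}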
Example 2
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s [options] <dataset>\n", os.Args[0])
		flag.PrintDefaults()
	}

	spec.RegisterDatabaseFlags(flag.CommandLine)

	flag.Parse(true)

	if flag.NArg() != 1 {
		fmt.Fprintln(os.Stderr, "Missing required dataset argument")
		return
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not create dataset: %s\n", err)
		return
	}
	defer ds.Database().Close()

	newVal := uint64(1)
	if lastVal, ok := ds.MaybeHeadValue(); ok {
		newVal = uint64(lastVal.(types.Number)) + 1
	}

	_, err = ds.CommitValue(types.Number(newVal))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error committing: %s\n", err)
		return
	}

	fmt.Println(newVal)
}
Example 3
func main() {
	useSHA := flag.String("use-sha", "", "<default>=no hashing, 1=sha1, 256=sha256, 512=sha512, blake=blake2b")
	useBH := flag.Bool("use-bh", false, "whether we buzhash the bytes")
	flag.Usage = func() {
		fmt.Printf("%s <big-file>\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if len(flag.Args()) < 1 {
		flag.Usage()
		return
	}

	p := flag.Args()[0]
	bh := buzhash.NewBuzHash(64 * 8)
	f, err := os.Open(p)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not open %s: %s\n", p, err)
		os.Exit(1)
	}
	defer f.Close()
	t0 := time.Now()
	buf := make([]byte, 4*1024)
	l := uint64(0)

	var h hash.Hash
	if *useSHA == "1" {
		h = sha1.New()
	} else if *useSHA == "256" {
		h = sha256.New()
	} else if *useSHA == "512" {
		h = sha512.New()
	} else if *useSHA == "blake" {
		h = blake2.NewBlake2B()
	}

	for {
		n, err := f.Read(buf)
		l += uint64(n)
		// Consume the n bytes first: a Read may return data together with io.EOF.
		s := buf[:n]
		if h != nil {
			h.Write(s)
		}
		if *useBH {
			bh.Write(s)
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			fmt.Fprintf(os.Stderr, "Read error: %s\n", err)
			os.Exit(1)
		}
	}

	t1 := time.Now()
	d := t1.Sub(t0)
	fmt.Printf("Read  %s in %s (%s/s)\n", humanize.Bytes(l), d, humanize.Bytes(uint64(float64(l)/d.Seconds())))
	if h != nil {
		fmt.Printf("%x\n", h.Sum(nil))
	}
}
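The read loop above can also be phrased entirely with the standard library: io.Copy plus io.MultiWriter feeds every active hasher in one pass. A minimal sketch (imports: crypto/sha1, crypto/sha256, fmt, io, os):

func hashFile(path string) (sha1sum, sha256sum string, err error) {
	f, err := os.Open(path)
	if err != nil {
		return "", "", err
	}
	defer f.Close()

	h1, h256 := sha1.New(), sha256.New()
	// io.Copy with io.MultiWriter fans each read chunk out to all writers
	// in a single pass, replacing the manual buffer loop.
	if _, err := io.Copy(io.MultiWriter(h1, h256), f); err != nil {
		return "", "", err
	}
	return fmt.Sprintf("%x", h1.Sum(nil)), fmt.Sprintf("%x", h256.Sum(nil)), nil
}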
Example 4
func main() {
	// use [from/to/by] or [from/iterations]
	nFrom := flag.Uint64("from", 1e2, "start iterations from this number")
	nTo := flag.Uint64("to", 1e4, "run iterations until arriving at this number")
	nBy := flag.Uint64("by", 1, "increment each iteration by this number")
	nIncrements := flag.Uint64("iterations", 0, "number of iterations to execute")
	encodingType := flag.String("encoding", "string", "encode/decode as 'string', 'binary', 'binary-int', 'binary-varint'")
	flag.Usage = func() {
		fmt.Printf("%s\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	t0 := time.Now()
	nBytes := uint64(0)
	nIterations := uint64(0)

	encoderDecoder := getEncoder(*encodingType)
	startingLoop := newBigFloat(*nFrom)

	var endLoop *big.Float
	var incrementer *big.Float

	if *nIncrements > 0 {
		// using from/iterations flags
		fmt.Printf("encoding: %v from: %v iterations: %v\n", *encodingType, *nFrom, *nIncrements)

		incrementer = newBigFloat(1)
		n := newBigFloat(*nIncrements)
		endLoop = n.Add(n, startingLoop)
	} else {
		// using from/to/by flags
		fmt.Printf("encoding: %v from: %v to: %v by: %v\n", *encodingType, *nFrom, *nTo, *nBy)
		incrementer = newBigFloat(*nBy)
		endLoop = newBigFloat(*nTo)
	}

	for i := startingLoop; i.Cmp(endLoop) < 0; i = i.Add(i, incrementer) {
		nIterations++
		nBytes += runTest(encoderDecoder, i)
	}

	t1 := time.Now()
	d := t1.Sub(t0)
	fmt.Printf("IO  %s (%v nums) in %s (%s/s)\n", humanize.Bytes(nBytes), humanize.Comma(int64(nIterations)), d, humanize.Bytes(uint64(float64(nBytes)/d.Seconds())))
}
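The newBigFloat helper is not shown. A plausible one-liner, assuming it simply lifts a uint64 flag value into an arbitrary-precision float so that Cmp and Add work uniformly (import math/big):

// Hypothetical helper assumed by the loop above.
func newBigFloat(n uint64) *big.Float {
	return new(big.Float).SetUint64(n)
}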
Example 5
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s <dataset> <file>\n", os.Args[0])
		flag.PrintDefaults()
	}

	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Parse(true)

	if len(flag.Args()) != 2 {
		d.CheckError(errors.New("expected dataset and file arguments"))
	}

	var blob types.Blob
	path := flag.Arg(0)
	if db, val, err := spec.GetPath(path); err != nil {
		d.CheckErrorNoUsage(err)
	} else if val == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at %s", path))
	} else if b, ok := val.(types.Blob); !ok {
		d.CheckErrorNoUsage(fmt.Errorf("Value at %s is not a blob", path))
	} else {
		defer db.Close()
		blob = b
	}

	filePath := flag.Arg(1)
	if filePath == "" {
		d.CheckErrorNoUsage(errors.New("Empty file path"))
	}

	// Note: overwrites any existing file.
	file, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE, 0644)
	d.CheckErrorNoUsage(err)
	defer file.Close()

	expected := humanize.Bytes(blob.Len())
	start := time.Now()

	progReader := progressreader.New(blob.Reader(), func(seen uint64) {
		elapsed := time.Since(start).Seconds()
		rate := uint64(float64(seen) / elapsed)
		status.Printf("%s of %s written in %ds (%s/s)...", humanize.Bytes(seen), expected, int(elapsed), humanize.Bytes(rate))
	})

	_, err = io.Copy(file, progReader)
	d.CheckErrorNoUsage(err)
	status.Done()
}
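progressreader.New is a Noms utility; the underlying pattern is a small io.Reader wrapper that reports the running byte count to a callback after every read. A simplified sketch, not the actual Noms implementation:

// progressReader wraps an io.Reader and invokes cb with the cumulative
// number of bytes read after each Read call.
type progressReader struct {
	r    io.Reader
	seen uint64
	cb   func(seen uint64)
}

func (p *progressReader) Read(buf []byte) (int, error) {
	n, err := p.r.Read(buf)
	p.seen += uint64(n)
	p.cb(p.seen)
	return n, err
}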
Example 6
func main() {
	usage := func() {
		fmt.Fprintln(os.Stderr, "Usage: noms ui [-host HOST] directory [args...]")
		fmt.Fprintln(os.Stderr, "  args are of the form arg1=val1, arg2=val2, etc. \"ldb:\" values are automatically translated into paths to an HTTP noms database server.")
		flag.PrintDefaults()
	}

	flag.Usage = usage
	flag.Parse(true)

	if len(flag.Args()) == 0 {
		usage()
		os.Exit(1)
	}

	uiDir := flag.Arg(0)
	qsValues, stores := constructQueryString(flag.Args()[1:])

	router := &httprouter.Router{
		HandleMethodNotAllowed: true,
		NotFound:               http.FileServer(http.Dir(uiDir)),
		RedirectFixedPath:      true,
	}

	prefix := dsPathPrefix + "/:store"
	router.POST(prefix+constants.GetRefsPath, routeToStore(stores, datas.HandleGetRefs))
	router.OPTIONS(prefix+constants.GetRefsPath, routeToStore(stores, datas.HandleGetRefs))
	router.GET(prefix+constants.RootPath, routeToStore(stores, datas.HandleRootGet))
	router.POST(prefix+constants.RootPath, routeToStore(stores, datas.HandleRootPost))
	router.OPTIONS(prefix+constants.RootPath, routeToStore(stores, datas.HandleRootGet))

	l, err := net.Listen("tcp", fmt.Sprintf("localhost:%d", *portFlag))
	d.Chk.NoError(err)

	srv := &http.Server{
		Handler: http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
			router.ServeHTTP(w, req)
		}),
	}

	qs := ""
	if len(qsValues) > 0 {
		qs = "?" + qsValues.Encode()
	}

	fmt.Printf("Starting UI %s at http://%s%s\n", uiDir, l.Addr().String(), qs)
	log.Fatal(srv.Serve(l))
}
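routeToStore is defined elsewhere; presumably it resolves the ":store" URL parameter registered in the prefix and dispatches to the matching database. A hypothetical sketch using julienschmidt/httprouter's Params (the real version also threads through a datas handler such as HandleGetRefs):

// Hypothetical adapter; the real routeToStore signature differs.
func routeToStore(stores map[string]http.Handler) httprouter.Handle {
	return func(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
		if store, ok := stores[ps.ByName("store")]; ok {
			store.ServeHTTP(w, req)
			return
		}
		http.NotFound(w, req)
	}
}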
Example 7
func main() {
	var dsStr = flag.String("ds", "", "noms dataset to read/write from")

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: %s [flags] [command] [command-args]\n\n", os.Args[0])
		fmt.Fprintln(os.Stderr, "Flags:")
		flag.PrintDefaults()
		fmt.Fprintln(os.Stderr, "\nCommands:")
		fmt.Fprintln(os.Stderr, "\tadd-person <id> <name> <title>")
		fmt.Fprintln(os.Stderr, "\tlist-persons")
	}

	flag.Parse(true)

	if flag.NArg() == 0 {
		fmt.Fprintln(os.Stderr, "Not enough arguments")
		return
	}

	if *dsStr == "" {
		fmt.Fprintln(os.Stderr, "Required flag '--ds' not set")
		return
	}

	ds, err := spec.GetDataset(*dsStr)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not create dataset: %s\n", err)
		return
	}
	defer ds.Database().Close()

	switch flag.Arg(0) {
	case "add-person":
		addPerson(ds)
	case "list-persons":
		listPersons(ds)
	default:
		fmt.Fprintf(os.Stderr, "Unknown command: %s\n", flag.Arg(0))
	}
}
Example 8
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s <url> <dataset>\n", os.Args[0])
		flag.PrintDefaults()
	}

	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Parse(true)

	if len(flag.Args()) != 2 {
		d.CheckError(errors.New("expected url and dataset arguments"))
	}

	ds, err := spec.GetDataset(flag.Arg(1))
	d.CheckError(err)

	url := flag.Arg(0)
	if url == "" {
		flag.Usage()
		return
	}

	res, err := http.Get(url)
	if err != nil {
		log.Fatalf("Error fetching %s: %+v\n", url, err)
	} else if res.StatusCode != 200 {
		log.Fatalf("Error fetching %s: %s\n", url, res.Status)
	}
	defer res.Body.Close()

	var jsonObject interface{}
	err = json.NewDecoder(res.Body).Decode(&jsonObject)
	if err != nil {
		log.Fatalln("Error decoding JSON: ", err)
	}

	_, err = ds.CommitValue(jsontonoms.NomsValueFromDecodedJSON(jsonObject, true))
	d.PanicIfError(err)
	ds.Database().Close()
}
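http.Get uses http.DefaultClient, which has no timeout; for an importer that talks to arbitrary servers, a bounded client is safer. A hedged alternative sketch of the fetch-and-decode step (imports: encoding/json, fmt, net/http, time):

// fetchJSON is a hypothetical helper: fetch url with a 30s timeout and
// decode the response body as JSON into v.
func fetchJSON(url string, v interface{}) error {
	client := &http.Client{Timeout: 30 * time.Second}
	res, err := client.Get(url)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		return fmt.Errorf("fetching %s: %s", url, res.Status)
	}
	return json.NewDecoder(res.Body).Decode(v)
}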
Example 9
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage of %s:\n  %s path/to/staging/dir\n", os.Args[0], os.Args[0])
	}
	flag.Parse(true)
	if flag.Arg(0) == "" {
		flag.Usage()
		os.Exit(1)
	}
	err := d.Try(func() {
		stagingDir, err := filepath.Abs(flag.Arg(0))
		d.PanicIfTrue(err != nil, "Path to staging directory (first arg) must be valid, not %s", flag.Arg(0))
		d.PanicIfError(os.MkdirAll(stagingDir, 0755))

		goPath := os.Getenv("GOPATH")
		d.PanicIfTrue(goPath == "", "GOPATH must be set!")
		workspace := os.Getenv("WORKSPACE")
		if workspace == "" {
			fmt.Printf("WORKSPACE not set in environment; using GOPATH (%s).\n", goPath)
			workspace = goPath
		}
		pythonPath := filepath.Join(goPath, nomsCheckoutPath, "tools")
		env := runner.Env{
			"PYTHONPATH": pythonPath,
		}

		if !runner.Serial(os.Stdout, os.Stderr, env, ".", buildScript) {
			os.Exit(1)
		}

		if !runner.Serial(os.Stdout, os.Stderr, env, ".", stageScript, stagingDir) {
			os.Exit(1)
		}
	})
	if err != nil {
		log.Fatal(err)
	}
}
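d.Try converts panics raised by the d.PanicIf* helpers back into an ordinary error at the top of main. The mechanism is a deferred recover; a simplified sketch of the pattern (the real Noms version inspects the panic value's type):

// try runs f and turns any panic into an error, mirroring the
// d.Try / d.PanicIfError style used above.
func try(f func()) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%v", r)
		}
	}()
	f()
	return
}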
Example 10
func main() {
	// Actually the delimiter uses runes, which can be multiple characters long.
	// https://blog.golang.org/strings
	delimiter := flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")

	spec.RegisterDatabaseFlags(flag.CommandLine)
	profile.RegisterProfileFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Usage: csv-export [options] dataset > filename")
		flag.PrintDefaults()
	}

	flag.Parse(true)

	if flag.NArg() != 1 {
		d.CheckError(errors.New("expected dataset arg"))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	d.CheckError(err)

	defer ds.Database().Close()

	comma, err := csv.StringToRune(*delimiter)
	d.CheckError(err)

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()

		nomsList, structDesc := csv.ValueToListAndElemDesc(ds.HeadValue(), ds.Database())
		csv.Write(nomsList, structDesc, comma, os.Stdout)
	})
	if err != nil {
		fmt.Println("Failed to export dataset as CSV:")
		fmt.Println(err)
	}
}
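csv.StringToRune is a Noms helper; per the comment at the top of main, it has to reject delimiters that are not exactly one rune (a multi-byte UTF-8 character still counts as one rune). A plausible sketch (imports: fmt, unicode/utf8):

// Hypothetical shape of csv.StringToRune: validate that the delimiter flag
// is exactly one rune and return it.
func stringToRune(s string) (rune, error) {
	if utf8.RuneCountInString(s) != 1 {
		return 0, fmt.Errorf("delimiter must be exactly one character, got %q", s)
	}
	r, _ := utf8.DecodeRuneInString(s)
	return r, nil
}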
Example 11
func main() {
	flag.Usage = usage
	flag.Parse(false)

	args := flag.Args()
	if len(args) < 1 {
		usage()
		return
	}

	if args[0] == "help" {
		help(args[1:])
		return
	}

	for _, cmd := range commands {
		if cmd.Name() == args[0] {
			flags := cmd.Flags()
			flags.Usage = cmd.Usage

			flags.Parse(true, args[1:])
			args = flags.Args()
			if cmd.Nargs != 0 && len(args) < cmd.Nargs {
				cmd.Usage()
			}
			exitCode := cmd.Run(args)
			if exitCode != 0 {
				os.Exit(exitCode)
			}
			return
		}
	}

	fmt.Fprintf(os.Stderr, "noms: unknown command %q\n", args[0])
	usage()
}
Example 12
func main() {
	err := d.Try(func() {
		spec.RegisterDatabaseFlags(flag.CommandLine)
		profile.RegisterProfileFlags(flag.CommandLine)
		flag.Usage = customUsage
		flag.Parse(true)

		if flag.NArg() != 2 {
			d.CheckError(errors.New("Expected directory path followed by dataset"))
		}
		dir := flag.Arg(0)
		ds, err := spec.GetDataset(flag.Arg(1))
		d.CheckError(err)

		defer profile.MaybeStartProfile().Stop()

		cpuCount := runtime.NumCPU()

		filesChan := make(chan fileIndex, 1024)
		refsChan := make(chan refIndex, 1024)

		getFilePaths := func() {
			index := 0
			err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
				d.PanicIfTrue(err != nil, "Cannot traverse directories")
				if !info.IsDir() && filepath.Ext(path) == ".xml" {
					filesChan <- fileIndex{path, index}
					index++
				}

				return nil
			})
			d.PanicIfError(err)
			close(filesChan)
		}

		wg := sync.WaitGroup{}
		importXML := func() {
			expectedType := types.NewMap()
			for f := range filesChan {
				file, err := os.Open(f.path)
				d.PanicIfTrue(err != nil, "Error getting XML")

				xmlObject, err := mxj.NewMapXmlReader(file)
				d.PanicIfTrue(err != nil, "Error decoding XML")
				object := xmlObject.Old()
				file.Close()

				nomsObj := jsontonoms.NomsValueFromDecodedJSON(object, false)
				d.Chk.IsType(expectedType, nomsObj)

				var r types.Ref
				if !*noIO {
					r = ds.Database().WriteValue(nomsObj)
				}

				refsChan <- refIndex{r, f.index}
			}

			wg.Done()
		}

		go getFilePaths()
		for i := 0; i < cpuCount*8; i++ {
			wg.Add(1)
			go importXML()
		}
		go func() {
			wg.Wait()
			close(refsChan) // done converting xml to noms
		}()

		refList := refIndexList{}
		for r := range refsChan {
			refList = append(refList, r)
		}
		sort.Sort(refList)

		refs := make([]types.Value, len(refList))
		for idx, r := range refList {
			refs[idx] = r.ref
		}

		rl := types.NewList(refs...)

		if !*noIO {
			_, err := ds.CommitValue(rl)
			d.PanicIfError(err)
		}
	})

	if err != nil {
		log.Fatal(err)
	}
}
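Distilled, the concurrency shape above is a classic fan-out/fan-in pool: one producer closes the work channel, N workers drain it, and a final goroutine closes the results channel once every worker is done. A self-contained sketch of just that skeleton (imports: sync):

func pool(paths []string, workers int, process func(string) string) []string {
	work := make(chan string, len(paths))
	results := make(chan string, len(paths))

	var wg sync.WaitGroup
	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for p := range work { // exits when work is closed and drained
				results <- process(p)
			}
		}()
	}

	for _, p := range paths {
		work <- p
	}
	close(work)

	go func() {
		wg.Wait()
		close(results) // all workers done; unblocks the range below
	}()

	var out []string
	for r := range results {
		out = append(out, r)
	}
	return out
}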
Example 13
func main() {
	profile.RegisterProfileFlags(flag.CommandLine)
	flag.Parse(true)

	buildCount := *count
	insertCount := buildCount / 50
	defer profile.MaybeStartProfile().Stop()

	collectionTypes := []string{"List", "Set", "Map"}
	buildFns := []buildCollectionFn{buildList, buildSet, buildMap}
	buildIncrFns := []buildCollectionFn{buildListIncrementally, buildSetIncrementally, buildMapIncrementally}
	readFns := []readCollectionFn{readList, readSet, readMap}

	elementTypes := []string{"numbers (8 B)", "strings (32 B)", "structs (64 B)"}
	elementSizes := []uint64{numberSize, stringSize, structSize}
	valueFns := []createValueFn{createNumber, createString, createStruct}

	for i, colType := range collectionTypes {
		fmt.Printf("Testing %s: \t\tbuild %d\t\t\tscan %d\t\t\tinsert %d\n", colType, buildCount, buildCount, insertCount)

		for j, elementType := range elementTypes {
			valueFn := valueFns[j]

			// Build One-Time
			ms := chunks.NewMemoryStore()
			ds := dataset.NewDataset(datas.NewDatabase(ms), "test")
			t1 := time.Now()
			col := buildFns[i](buildCount, valueFn)
			ds, err := ds.CommitValue(col)
			d.Chk.NoError(err)
			buildDuration := time.Since(t1)

			// Read
			t1 = time.Now()
			col = ds.HeadValue().(types.Collection)
			readFns[i](col)
			readDuration := time.Since(t1)

			// Build Incrementally
			ms = chunks.NewMemoryStore()
			ds = dataset.NewDataset(datas.NewDatabase(ms), "test")
			t1 = time.Now()
			col = buildIncrFns[i](insertCount, valueFn)
			ds, err = ds.CommitValue(col)
			d.Chk.NoError(err)
			incrDuration := time.Since(t1)

			elementSize := elementSizes[j]
			buildSize := elementSize * buildCount
			incrSize := elementSize * insertCount

			fmt.Printf("%s\t\t%s\t\t%s\t\t%s\n", elementType, rate(buildDuration, buildSize), rate(readDuration, buildSize), rate(incrDuration, incrSize))
		}
		fmt.Println()
	}

	fmt.Printf("Testing Blob: \t\tbuild %d MB\t\t\tscan %d MB\n", *blobSize/1000000, *blobSize/1000000)

	ms := chunks.NewMemoryStore()
	ds := dataset.NewDataset(datas.NewDatabase(ms), "test")

	blobBytes := makeBlobBytes(*blobSize)
	t1 := time.Now()
	blob := types.NewBlob(bytes.NewReader(blobBytes))
	_, err := ds.CommitValue(blob)
	d.Chk.NoError(err)
	buildDuration := time.Since(t1)

	ds = dataset.NewDataset(datas.NewDatabase(ms), "test")
	t1 = time.Now()
	blob = ds.HeadValue().(types.Blob)
	outBytes, err := ioutil.ReadAll(blob.Reader())
	readDuration := time.Since(t1)
	d.Chk.NoError(err)
	d.Chk.True(bytes.Equal(blobBytes, outBytes))
	fmt.Printf("\t\t\t%s\t\t%s\n\n", rate(buildDuration, *blobSize), rate(readDuration, *blobSize))
}
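The rate helper is not shown; judging by its call sites it humanizes throughput for a byte count processed in a given duration. A plausible sketch (imports: fmt, time, github.com/dustin/go-humanize):

// Hypothetical rate helper assumed above.
func rate(d time.Duration, size uint64) string {
	return fmt.Sprintf("%s/s", humanize.Bytes(uint64(float64(size)/d.Seconds())))
}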
Example 14
func main() {
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	spec.RegisterDatabaseFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Fetches a URL (or file) into a noms blob\n\nUsage: %s <url-or-local-path> <dataset>\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if flag.NArg() != 2 {
		d.CheckErrorNoUsage(errors.New("expected dataset and url arguments"))
	}

	ds, err := spec.GetDataset(flag.Arg(1))
	d.CheckErrorNoUsage(err)
	defer ds.Database().Close()

	url := flag.Arg(0)
	fileOrUrl := "file"
	start = time.Now()

	var pr io.Reader

	if strings.HasPrefix(url, "http") {
		resp, err := http.Get(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %s\n", url, err)
			return
		}

		switch resp.StatusCode / 100 {
		case 4, 5:
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %d (%s)\n", url, resp.StatusCode, resp.Status)
			return
		}

		pr = progressreader.New(resp.Body, getStatusPrinter(resp.ContentLength))
		fileOrUrl = "url"
	} else {
		// assume it's a file
		f, err := os.Open(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Invalid URL %s - does not start with 'http' and isn't local file either. fopen error: %s", url, err)
			return
		}

		s, err := f.Stat()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not stat file %s: %s", url, err)
			return
		}

		pr = progressreader.New(f, getStatusPrinter(s.Size()))
		fileOrUrl = "file"
	}

	b := types.NewStreamingBlob(pr, ds.Database())
	mi := metaInfoForCommit(fileOrUrl, url, *comment)
	ds, err = ds.Commit(b, dataset.CommitOptions{Meta: mi})
	if err != nil {
		d.Chk.Equal(datas.ErrMergeNeeded, err)
		fmt.Fprintf(os.Stderr, "Could not commit, optimistic concurrency failed.")
		return
	}

	status.Done()
	fmt.Println("Done")
}
Example 15
func main() {
	// Actually the delimiter uses runes, which can be multiple characters long.
	// https://blog.golang.org/strings
	delimiter := flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	header := flag.String("header", "", "header row. If empty, we'll use the first row of the file")
	name := flag.String("name", "Row", "struct name. The user-visible name to give to the struct type that will hold each row of data.")
	columnTypes := flag.String("column-types", "", "a comma-separated list of types representing the desired type of each column. if absent all types default to be String")
	pathDescription := "noms path to blob to import"
	path := flag.String("path", "", pathDescription)
	flag.StringVar(path, "p", "", pathDescription)
	dateFlag := flag.String("date", "", fmt.Sprintf(`date of commit in ISO 8601 format ("%s"). By default, the current date is used.`, dateFormat))
	noProgress := flag.Bool("no-progress", false, "prevents progress from being output if true")
	destType := flag.String("dest-type", "list", "the destination type to import to. can be 'list' or 'map:<pk>', where <pk> is the index position (0-based) of the column that is the unique identifier for the row")
	skipRecords := flag.Uint("skip-records", 0, "number of records to skip at beginning of file")
	destTypePattern := regexp.MustCompile(`^(list|map):(\d+)$`)

	spec.RegisterDatabaseFlags(flag.CommandLine)
	profile.RegisterProfileFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: csv-import [options] <csvfile> <dataset>\n\n")
		flag.PrintDefaults()
	}

	flag.Parse(true)

	var err error
	switch {
	case flag.NArg() == 0:
		err = errors.New("Maybe you put options after the dataset?")
	case flag.NArg() == 1 && *path == "":
		err = errors.New("If <csvfile> isn't specified, you must specify a noms path with -p")
	case flag.NArg() == 2 && *path != "":
		err = errors.New("Cannot specify both <csvfile> and a noms path with -p")
	case flag.NArg() > 2:
		err = errors.New("Too many arguments")
	}
	d.CheckError(err)

	var date = *dateFlag
	if date == "" {
		date = time.Now().UTC().Format(dateFormat)
	} else {
		_, err := time.Parse(dateFormat, date)
		d.CheckErrorNoUsage(err)
	}

	defer profile.MaybeStartProfile().Stop()

	var r io.Reader
	var size uint64
	var filePath string
	var dataSetArgN int

	if *path != "" {
		db, val, err := spec.GetPath(*path)
		d.CheckError(err)
		if val == nil {
			d.CheckError(fmt.Errorf("Path %s not found\n", *path))
		}
		blob, ok := val.(types.Blob)
		if !ok {
			d.CheckError(fmt.Errorf("Path %s not a Blob: %s\n", *path, types.EncodedValue(val.Type())))
		}
		defer db.Close()
		r = blob.Reader()
		size = blob.Len()
		dataSetArgN = 0
	} else {
		filePath = flag.Arg(0)
		res, err := os.Open(filePath)
		d.CheckError(err)
		defer res.Close()
		fi, err := res.Stat()
		d.CheckError(err)
		r = res
		size = uint64(fi.Size())
		dataSetArgN = 1
	}

	if !*noProgress {
		r = progressreader.New(r, getStatusPrinter(size))
	}

	comma, err := csv.StringToRune(*delimiter)
	d.CheckErrorNoUsage(err)

	var dest int
	var pk int
	if *destType == "list" {
		dest = destList
	} else if match := destTypePattern.FindStringSubmatch(*destType); match != nil {
		dest = destMap
		pk, err = strconv.Atoi(match[2])
		d.CheckErrorNoUsage(err)
	} else {
		fmt.Println("Invalid dest-type: ", *destType)
		return
	}

	cr := csv.NewCSVReader(r, comma)
	for i := uint(0); i < *skipRecords; i++ {
		cr.Read()
	}

	var headers []string
	if *header == "" {
		headers, err = cr.Read()
		d.PanicIfError(err)
	} else {
		headers = strings.Split(*header, string(comma))
	}

	ds, err := spec.GetDataset(flag.Arg(dataSetArgN))
	d.CheckError(err)
	defer ds.Database().Close()

	kinds := []types.NomsKind{}
	if *columnTypes != "" {
		kinds = csv.StringsToKinds(strings.Split(*columnTypes, ","))
	}

	var value types.Value
	if dest == destList {
		value, _ = csv.ReadToList(cr, *name, headers, kinds, ds.Database())
	} else {
		value = csv.ReadToMap(cr, headers, pk, kinds, ds.Database())
	}
	mi := metaInfoForCommit(date, filePath, *path, *comment)
	_, err = ds.Commit(value, dataset.CommitOptions{Meta: mi})
	if !*noProgress {
		status.Clear()
	}
	d.PanicIfError(err)
}
Example 16
func main() {
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	stdin := flag.Bool("stdin", false, "read blob from stdin")

	spec.RegisterDatabaseFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Fetches a URL, file, or stdin into a noms blob\n\nUsage: %s [--stdin?] [url-or-local-path?] [dataset]\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if !(*stdin && flag.NArg() == 1) && flag.NArg() != 2 {
		flag.Usage()
		os.Exit(1)
	}

	start = time.Now()

	ds, err := spec.GetDataset(flag.Arg(flag.NArg() - 1))
	d.CheckErrorNoUsage(err)
	defer ds.Database().Close()

	var r io.Reader
	var contentLength int64
	var sourceType, sourceVal string

	if *stdin {
		r = os.Stdin
		contentLength = -1
	} else if url := flag.Arg(0); strings.HasPrefix(url, "http") {
		resp, err := http.Get(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %s\n", url, err)
			return
		}

		switch resp.StatusCode / 100 {
		case 4, 5:
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %d (%s)\n", url, resp.StatusCode, resp.Status)
			return
		}

		r = resp.Body
		contentLength = resp.ContentLength
		sourceType, sourceVal = "url", url
	} else {
		// assume it's a file
		f, err := os.Open(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Invalid URL %s - does not start with 'http' and isn't local file either. fopen error: %s", url, err)
			return
		}

		s, err := f.Stat()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not stat file %s: %s", url, err)
			return
		}

		r = f
		contentLength = s.Size()
		sourceType, sourceVal = "file", url
	}

	pr := progressreader.New(r, getStatusPrinter(contentLength))
	b := types.NewStreamingBlob(pr, ds.Database())
	mi := metaInfoForCommit(sourceType, sourceVal, *comment)
	ds, err = ds.Commit(b, dataset.CommitOptions{Meta: mi})
	if err != nil {
		d.Chk.Equal(datas.ErrMergeNeeded, err)
		fmt.Fprintf(os.Stderr, "Could not commit, optimistic concurrency failed.")
		return
	}

	status.Done()
	fmt.Println("Done")
}
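getStatusPrinter (used here and in the earlier blob importers) is not shown; it evidently returns the func(seen uint64) callback that progressreader.New expects. A hypothetical sketch in the style of the status.Printf calls above; note contentLength is -1 when the size is unknown (stdin):

// Hypothetical status printer; the real Noms version formats differently.
func getStatusPrinter(contentLength int64) func(seen uint64) {
	return func(seen uint64) {
		if contentLength < 0 {
			status.Printf("%s written...", humanize.Bytes(seen))
			return
		}
		pct := 100 * float64(seen) / float64(contentLength)
		status.Printf("%s of %s written (%.0f%%)...", humanize.Bytes(seen), humanize.Bytes(uint64(contentLength)), pct)
	}
}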