func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "usage: %s [options] <dataset>\n", os.Args[0]) flag.PrintDefaults() } spec.RegisterDatabaseFlags(flag.CommandLine) flag.Parse(true) if flag.NArg() != 1 { fmt.Fprintln(os.Stderr, "Missing required dataset argument") return } ds, err := spec.GetDataset(flag.Arg(0)) if err != nil { fmt.Fprintf(os.Stderr, "Could not create dataset: %s\n", err) return } defer ds.Database().Close() newVal := uint64(1) if lastVal, ok := ds.MaybeHeadValue(); ok { newVal = uint64(lastVal.(types.Number)) + 1 } _, err = ds.CommitValue(types.Number(newVal)) if err != nil { fmt.Fprintf(os.Stderr, "Error committing: %s\n", err) return } fmt.Println(newVal) }
func main() {
	useSHA := flag.String("use-sha", "", "<default>=no hashing, 1=sha1, 256=sha256, 512=sha512, blake=blake2b")
	useBH := flag.Bool("use-bh", false, "whether we buzhash the bytes")
	flag.Usage = func() {
		fmt.Printf("%s <big-file>\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if len(flag.Args()) < 1 {
		flag.Usage()
		return
	}

	p := flag.Args()[0]
	bh := buzhash.NewBuzHash(64 * 8)
	f, err := os.Open(p)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not open %s: %s\n", p, err)
		return
	}
	defer f.Close()

	t0 := time.Now()
	buf := make([]byte, 4*1024)
	l := uint64(0)

	var h hash.Hash
	switch *useSHA {
	case "1":
		h = sha1.New()
	case "256":
		h = sha256.New()
	case "512":
		h = sha512.New()
	case "blake":
		h = blake2.NewBlake2B()
	}

	for {
		n, err := f.Read(buf)
		l += uint64(n)
		// Process the bytes before checking err: Read may return data alongside io.EOF.
		s := buf[:n]
		if h != nil {
			h.Write(s)
		}
		if *useBH {
			bh.Write(s)
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			fmt.Fprintf(os.Stderr, "Read error: %s\n", err)
			return
		}
	}

	t1 := time.Now()
	d := t1.Sub(t0)
	fmt.Printf("Read %s in %s (%s/s)\n", humanize.Bytes(l), d, humanize.Bytes(uint64(float64(l)/d.Seconds())))
	digest := []byte{}
	if h != nil {
		fmt.Printf("%x\n", h.Sum(digest))
	}
}
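// The -use-bh path above is benchmarked because noms uses a buzhash rolling
// hash to find chunk boundaries in large blobs. Below is a minimal sketch of
// that idea, under stated assumptions: that the buzhash package in use exposes
// a HashByte method (as github.com/kch42/buzhash does), and an illustrative
// 12-bit boundary mask rather than noms' real chunking configuration.
func findChunks(data []byte) []int {
	bh := buzhash.NewBuzHash(64 * 8) // same 512-byte window as the benchmark above
	offsets := []int{}
	for i, b := range data {
		// Declare a chunk boundary whenever the low 12 bits of the rolling hash are all set.
		if bh.HashByte(b)&0xFFF == 0xFFF {
			offsets = append(offsets, i+1)
		}
	}
	return offsets
}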
func main() {
	// use [from/to/by] or [from/iterations]
	nFrom := flag.Uint64("from", 1e2, "start iterations from this number")
	nTo := flag.Uint64("to", 1e4, "run iterations until arriving at this number")
	nBy := flag.Uint64("by", 1, "increment each iteration by this number")
	nIncrements := flag.Uint64("iterations", 0, "number of iterations to execute")
	encodingType := flag.String("encoding", "string", "encode/decode as 'string', 'binary', 'binary-int', 'binary-varint'")
	flag.Usage = func() {
		fmt.Printf("%s\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	t0 := time.Now()
	nBytes := uint64(0)
	nIterations := uint64(0)
	encoderDecoder := getEncoder(*encodingType)
	startingLoop := newBigFloat(*nFrom)

	var endLoop *big.Float
	var incrementer *big.Float

	if *nIncrements > 0 {
		// using from/iterations flags
		fmt.Printf("encoding: %v from: %v iterations: %v\n", *encodingType, *nFrom, *nIncrements)
		incrementer = newBigFloat(1)
		n := newBigFloat(*nIncrements)
		endLoop = n.Add(n, startingLoop)
	} else {
		// using from/to/by flags
		fmt.Printf("encoding: %v from: %v to: %v by: %v\n", *encodingType, *nFrom, *nTo, *nBy)
		incrementer = newBigFloat(*nBy)
		endLoop = newBigFloat(*nTo)
	}

	for i := startingLoop; i.Cmp(endLoop) < 0; i = i.Add(i, incrementer) {
		nIterations++
		nBytes += runTest(encoderDecoder, i)
	}

	t1 := time.Now()
	d := t1.Sub(t0)
	fmt.Printf("IO %s (%v nums) in %s (%s/s)\n", humanize.Bytes(nBytes), humanize.Comma(int64(nIterations)), d, humanize.Bytes(uint64(float64(nBytes)/d.Seconds())))
}
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s <dataset> <file>\n", os.Args[0])
		flag.PrintDefaults()
	}
	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Parse(true)

	if len(flag.Args()) != 2 {
		d.CheckError(errors.New("expected dataset and file arguments"))
	}

	var blob types.Blob
	path := flag.Arg(0)
	if db, val, err := spec.GetPath(path); err != nil {
		d.CheckErrorNoUsage(err)
	} else if val == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at %s", path))
	} else if b, ok := val.(types.Blob); !ok {
		d.CheckErrorNoUsage(fmt.Errorf("Value at %s is not a blob", path))
	} else {
		defer db.Close()
		blob = b
	}

	filePath := flag.Arg(1)
	if filePath == "" {
		d.CheckErrorNoUsage(errors.New("Empty file path"))
	}

	// Note: overwrites any existing file.
	file, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE, 0644)
	d.CheckErrorNoUsage(err)
	defer file.Close()

	expected := humanize.Bytes(blob.Len())
	start := time.Now()
	progReader := progressreader.New(blob.Reader(), func(seen uint64) {
		elapsed := time.Since(start).Seconds()
		rate := uint64(float64(seen) / elapsed)
		status.Printf("%s of %s written in %ds (%s/s)...", humanize.Bytes(seen), expected, int(elapsed), humanize.Bytes(rate))
	})

	_, err = io.Copy(file, progReader)
	d.CheckErrorNoUsage(err)
	status.Done()
}
func main() {
	usage := func() {
		fmt.Fprintln(os.Stderr, "Usage: noms ui [-host HOST] directory [args...]")
		fmt.Fprintln(os.Stderr, "  args are of the form arg1=val1, arg2=val2, etc. \"ldb:\" values are automatically translated into paths to an HTTP noms database server.")
		flag.PrintDefaults()
	}
	flag.Usage = usage
	flag.Parse(true)

	if len(flag.Args()) == 0 {
		usage()
		os.Exit(1)
	}

	uiDir := flag.Arg(0)
	qsValues, stores := constructQueryString(flag.Args()[1:])

	router := &httprouter.Router{
		HandleMethodNotAllowed: true,
		NotFound:               http.FileServer(http.Dir(uiDir)),
		RedirectFixedPath:      true,
	}

	prefix := dsPathPrefix + "/:store"
	router.POST(prefix+constants.GetRefsPath, routeToStore(stores, datas.HandleGetRefs))
	router.OPTIONS(prefix+constants.GetRefsPath, routeToStore(stores, datas.HandleGetRefs))
	router.GET(prefix+constants.RootPath, routeToStore(stores, datas.HandleRootGet))
	router.POST(prefix+constants.RootPath, routeToStore(stores, datas.HandleRootPost))
	router.OPTIONS(prefix+constants.RootPath, routeToStore(stores, datas.HandleRootGet))

	l, err := net.Listen("tcp", fmt.Sprintf("localhost:%d", *portFlag))
	d.Chk.NoError(err)

	srv := &http.Server{
		Handler: http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
			router.ServeHTTP(w, req)
		}),
	}

	qs := ""
	if len(qsValues) > 0 {
		qs = "?" + qsValues.Encode()
	}

	fmt.Printf("Starting UI %s at http://%s%s\n", uiDir, l.Addr().String(), qs)
	log.Fatal(srv.Serve(l))
}
func getEncoder(name string) EncoderDecoder {
	switch name {
	case "string":
		return StringEncodedDecoder()
	case "binary":
		return NewBinaryEncoderDecoder()
	case "binary-int":
		return NewBinaryIntEncoderDecoder()
	case "binary-varint":
		return NewBinaryVarintEncoderDecoder()
	default:
		fmt.Printf("Unknown encoding specified: %s\n", name)
		flag.PrintDefaults()
		os.Exit(1)
		return nil // unreachable; satisfies the compiler
	}
}
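// The EncoderDecoder type returned by getEncoder is defined elsewhere in this
// package and is not shown in this excerpt. A hypothetical minimal shape,
// consistent with runTest above receiving a *big.Float, might be:
//
//	type EncoderDecoder interface {
//		Encode(w io.Writer, n *big.Float) // serialize n using this scheme
//		Decode(r io.Reader) *big.Float    // read one value back out
//	}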
func main() {
	var dsStr = flag.String("ds", "", "noms dataset to read/write from")

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: %s [flags] [command] [command-args]\n\n", os.Args[0])
		fmt.Fprintln(os.Stderr, "Flags:")
		flag.PrintDefaults()
		fmt.Fprintln(os.Stderr, "\nCommands:")
		fmt.Fprintln(os.Stderr, "\tadd-person <id> <name> <title>")
		fmt.Fprintln(os.Stderr, "\tlist-persons")
	}
	flag.Parse(true)

	if flag.NArg() == 0 {
		fmt.Fprintln(os.Stderr, "Not enough arguments")
		return
	}

	if *dsStr == "" {
		fmt.Fprintln(os.Stderr, "Required flag '--ds' not set")
		return
	}

	ds, err := spec.GetDataset(*dsStr)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not create dataset: %s\n", err)
		return
	}
	defer ds.Database().Close()

	switch flag.Arg(0) {
	case "add-person":
		addPerson(ds)
	case "list-persons":
		listPersons(ds)
	default:
		fmt.Fprintf(os.Stderr, "Unknown command: %s\n", flag.Arg(0))
	}
}
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s <url> <dataset>\n", os.Args[0])
		flag.PrintDefaults()
	}
	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Parse(true)

	if len(flag.Args()) != 2 {
		d.CheckError(errors.New("expected url and dataset arguments"))
	}

	ds, err := spec.GetDataset(flag.Arg(1))
	d.CheckError(err)

	url := flag.Arg(0)
	if url == "" {
		flag.Usage()
	}

	res, err := http.Get(url)
	if err != nil {
		log.Fatalf("Error fetching %s: %+v\n", url, err)
	} else if res.StatusCode != 200 {
		log.Fatalf("Error fetching %s: %s\n", url, res.Status)
	}
	defer res.Body.Close()

	var jsonObject interface{}
	err = json.NewDecoder(res.Body).Decode(&jsonObject)
	if err != nil {
		log.Fatalln("Error decoding JSON: ", err)
	}

	_, err = ds.CommitValue(jsontonoms.NomsValueFromDecodedJSON(jsonObject, true))
	d.PanicIfError(err)
	ds.Database().Close()
}
func main() {
	// Actually the delimiter uses runes, which can be multiple bytes long.
	// https://blog.golang.org/strings
	delimiter := flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")
	spec.RegisterDatabaseFlags(flag.CommandLine)
	profile.RegisterProfileFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Usage: csv-export [options] dataset > filename")
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if flag.NArg() != 1 {
		d.CheckError(errors.New("expected dataset arg"))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	d.CheckError(err)
	defer ds.Database().Close()

	comma, err := csv.StringToRune(*delimiter)
	d.CheckError(err)

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()
		nomsList, structDesc := csv.ValueToListAndElemDesc(ds.HeadValue(), ds.Database())
		csv.Write(nomsList, structDesc, comma, os.Stdout)
	})
	if err != nil {
		fmt.Println("Failed to export dataset as CSV:")
		fmt.Println(err)
	}
}
"github.com/attic-labs/noms/go/d" "github.com/attic-labs/noms/go/spec" "github.com/attic-labs/noms/go/types" "github.com/attic-labs/noms/go/util/jsontonoms" "github.com/attic-labs/noms/go/util/profile" "github.com/clbanning/mxj" flag "github.com/tsuru/gnuflag" ) var ( noIO = flag.Bool("benchmark", false, "Run in 'benchmark' mode, without file-IO") customUsage = func() { fmtString := `%s walks the given directory, looking for .xml files. When it finds one, the entity inside is parsed into nested Noms maps/lists and committed to the dataset indicated on the command line.` fmt.Fprintf(os.Stderr, fmtString, os.Args[0]) fmt.Fprintf(os.Stderr, "\n\nUsage: %s [options] <path/to/root/directory> <dataset>\n", os.Args[0]) flag.PrintDefaults() } ) type fileIndex struct { path string index int } type refIndex struct { ref types.Ref index int } type refIndexList []refIndex
func main() {
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Fetches a URL (or file) into a noms blob\n\nUsage: %s <url-or-local-path> <dataset>\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if flag.NArg() != 2 {
		d.CheckErrorNoUsage(errors.New("expected dataset and url arguments"))
	}

	ds, err := spec.GetDataset(flag.Arg(1))
	d.CheckErrorNoUsage(err)
	defer ds.Database().Close()

	url := flag.Arg(0)
	fileOrUrl := "file"
	start = time.Now()

	var pr io.Reader
	if strings.HasPrefix(url, "http") {
		resp, err := http.Get(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %s\n", url, err)
			return
		}
		switch resp.StatusCode / 100 {
		case 4, 5:
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %d (%s)\n", url, resp.StatusCode, resp.Status)
			return
		}
		pr = progressreader.New(resp.Body, getStatusPrinter(resp.ContentLength))
		fileOrUrl = "url"
	} else {
		// assume it's a file
		f, err := os.Open(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Invalid URL %s - does not start with 'http' and isn't a local file either. fopen error: %s\n", url, err)
			return
		}
		s, err := f.Stat()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not stat file %s: %s\n", url, err)
			return
		}
		pr = progressreader.New(f, getStatusPrinter(s.Size()))
		fileOrUrl = "file"
	}

	b := types.NewStreamingBlob(pr, ds.Database())
	mi := metaInfoForCommit(fileOrUrl, url, *comment)
	ds, err = ds.Commit(b, dataset.CommitOptions{Meta: mi})
	if err != nil {
		d.Chk.Equal(datas.ErrMergeNeeded, err)
		fmt.Fprintln(os.Stderr, "Could not commit, optimistic concurrency failed.")
		return
	}

	status.Done()
	fmt.Println("Done")
}
func main() {
	// Actually the delimiter uses runes, which can be multiple bytes long.
	// https://blog.golang.org/strings
	delimiter := flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	header := flag.String("header", "", "header row. If empty, we'll use the first row of the file")
	name := flag.String("name", "Row", "struct name. The user-visible name to give to the struct type that will hold each row of data.")
	columnTypes := flag.String("column-types", "", "a comma-separated list of types representing the desired type of each column. if absent all types default to be String")
	pathDescription := "noms path to blob to import"
	path := flag.String("path", "", pathDescription)
	flag.StringVar(path, "p", "", pathDescription)
	dateFlag := flag.String("date", "", fmt.Sprintf(`date of commit in ISO 8601 format ("%s"). By default, the current date is used.`, dateFormat))
	noProgress := flag.Bool("no-progress", false, "prevents progress from being output if true")
	destType := flag.String("dest-type", "list", "the destination type to import to. can be 'list' or 'map:<pk>', where <pk> is the index position (0-based) of the column that is the unique identifier for the row")
	skipRecords := flag.Uint("skip-records", 0, "number of records to skip at beginning of file")
	destTypePattern := regexp.MustCompile("^(list|map):(\\d+)$")

	spec.RegisterDatabaseFlags(flag.CommandLine)
	profile.RegisterProfileFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: csv-import [options] <csvfile> <dataset>\n\n")
		flag.PrintDefaults()
	}
	flag.Parse(true)

	var err error
	switch {
	case flag.NArg() == 0:
		err = errors.New("Maybe you put options after the dataset?")
	case flag.NArg() == 1 && *path == "":
		err = errors.New("If <csvfile> isn't specified, you must specify a noms path with -p")
	case flag.NArg() == 2 && *path != "":
		err = errors.New("Cannot specify both <csvfile> and a noms path with -p")
	case flag.NArg() > 2:
		err = errors.New("Too many arguments")
	}
	d.CheckError(err)

	var date = *dateFlag
	if date == "" {
		date = time.Now().UTC().Format(dateFormat)
	} else {
		_, err := time.Parse(dateFormat, date)
		d.CheckErrorNoUsage(err)
	}

	defer profile.MaybeStartProfile().Stop()

	var r io.Reader
	var size uint64
	var filePath string
	var dataSetArgN int

	if *path != "" {
		db, val, err := spec.GetPath(*path)
		d.CheckError(err)
		if val == nil {
			d.CheckError(fmt.Errorf("Path %s not found\n", *path))
		}
		blob, ok := val.(types.Blob)
		if !ok {
			d.CheckError(fmt.Errorf("Path %s not a Blob: %s\n", *path, types.EncodedValue(val.Type())))
		}
		defer db.Close()
		r = blob.Reader()
		size = blob.Len()
		dataSetArgN = 0
	} else {
		filePath = flag.Arg(0)
		res, err := os.Open(filePath)
		d.CheckError(err)
		defer res.Close()
		fi, err := res.Stat()
		d.CheckError(err)
		r = res
		size = uint64(fi.Size())
		dataSetArgN = 1
	}

	if !*noProgress {
		r = progressreader.New(r, getStatusPrinter(size))
	}

	comma, err := csv.StringToRune(*delimiter)
	d.CheckErrorNoUsage(err)

	var dest int
	var pk int
	if *destType == "list" {
		dest = destList
	} else if match := destTypePattern.FindStringSubmatch(*destType); match != nil {
		dest = destMap
		pk, err = strconv.Atoi(match[2])
		d.CheckErrorNoUsage(err)
	} else {
		fmt.Println("Invalid dest-type: ", *destType)
		return
	}

	cr := csv.NewCSVReader(r, comma)
	for i := uint(0); i < *skipRecords; i++ {
		cr.Read()
	}

	var headers []string
	if *header == "" {
		headers, err = cr.Read()
		d.PanicIfError(err)
	} else {
		headers = strings.Split(*header, string(comma))
	}

	ds, err := spec.GetDataset(flag.Arg(dataSetArgN))
	d.CheckError(err)
	defer ds.Database().Close()

	kinds := []types.NomsKind{}
	if *columnTypes != "" {
		kinds = csv.StringsToKinds(strings.Split(*columnTypes, ","))
	}

	var value types.Value
	if dest == destList {
		value, _ = csv.ReadToList(cr, *name, headers, kinds, ds.Database())
	} else {
		value = csv.ReadToMap(cr, headers, pk, kinds, ds.Database())
	}

	mi := metaInfoForCommit(date, filePath, *path, *comment)
	_, err = ds.Commit(value, dataset.CommitOptions{Meta: mi})
	if !*noProgress {
		status.Clear()
	}
	d.PanicIfError(err)
}
func main() {
	comment := flag.String("comment", "", "comment to add to commit's meta data")
	stdin := flag.Bool("stdin", false, "read blob from stdin")
	spec.RegisterDatabaseFlags(flag.CommandLine)
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Fetches a URL, file, or stdin into a noms blob\n\nUsage: %s [--stdin?] [url-or-local-path?] [dataset]\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if !(*stdin && flag.NArg() == 1) && flag.NArg() != 2 {
		flag.Usage()
		os.Exit(-1)
	}

	start = time.Now()

	ds, err := spec.GetDataset(flag.Arg(flag.NArg() - 1))
	d.CheckErrorNoUsage(err)
	defer ds.Database().Close()

	var r io.Reader
	var contentLength int64
	var sourceType, sourceVal string

	if *stdin {
		r = os.Stdin
		contentLength = -1
	} else if url := flag.Arg(0); strings.HasPrefix(url, "http") {
		resp, err := http.Get(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %s\n", url, err)
			return
		}
		switch resp.StatusCode / 100 {
		case 4, 5:
			fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %d (%s)\n", url, resp.StatusCode, resp.Status)
			return
		}
		r = resp.Body
		contentLength = resp.ContentLength
		sourceType, sourceVal = "url", url
	} else {
		// assume it's a file
		f, err := os.Open(url)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Invalid URL %s - does not start with 'http' and isn't a local file either. fopen error: %s\n", url, err)
			return
		}
		s, err := f.Stat()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not stat file %s: %s\n", url, err)
			return
		}
		r = f
		contentLength = s.Size()
		sourceType, sourceVal = "file", url
	}

	pr := progressreader.New(r, getStatusPrinter(contentLength))
	b := types.NewStreamingBlob(pr, ds.Database())
	mi := metaInfoForCommit(sourceType, sourceVal, *comment)
	ds, err = ds.Commit(b, dataset.CommitOptions{Meta: mi})
	if err != nil {
		d.Chk.Equal(datas.ErrMergeNeeded, err)
		fmt.Fprintln(os.Stderr, "Could not commit, optimistic concurrency failed.")
		return
	}

	status.Done()
	fmt.Println("Done")
}
func usage() {
	fmt.Println("Usage: demo-server -authkey <authkey> [options]")
	flag.PrintDefaults()
}