// csv-export: writes the value at the head of a dataset to stdout as CSV.
func main() {
	spec.RegisterDatabaseFlags()
	cpuCount := runtime.NumCPU()
	runtime.GOMAXPROCS(cpuCount)

	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Usage: csv-export [options] dataset > filename")
		flag.PrintDefaults()
	}

	flag.Parse()

	if flag.NArg() != 1 {
		util.CheckError(errors.New("expected dataset arg"))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	util.CheckError(err)
	defer ds.Database().Close()

	comma, err := csv.StringToRune(*delimiter)
	util.CheckError(err)

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()
		nomsList, structDesc := csv.ValueToListAndElemDesc(ds.HeadValue(), ds.Database())
		csv.Write(nomsList, structDesc, comma, os.Stdout)
	})
	if err != nil {
		fmt.Println("Failed to export dataset as CSV:")
		fmt.Println(err)
	}
}
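// csv-export above reads a package-level *delimiter flag that is not shown in
// the snippet. The declaration below is an assumption, mirroring the
// equivalent csv-import flag further down; the actual default and help text
// may differ.
var delimiter = flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")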
func main() { flag.Usage = func() { fmt.Fprintln(os.Stderr, "Shows a serialization of a Noms object\n") fmt.Fprintln(os.Stderr, "Usage: noms show <object>\n") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nSee \"Spelling Objects\" at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.\n\n") } flag.Parse() if *showHelp { flag.Usage() return } if len(flag.Args()) != 1 { util.CheckError(errors.New("expected exactly one argument")) } database, value, err := spec.GetPath(flag.Arg(0)) util.CheckError(err) waitChan := outputpager.PageOutput(!*outputpager.NoPager) types.WriteEncodedValueWithTags(os.Stdout, value) fmt.Fprintf(os.Stdout, "\n") database.Close() if waitChan != nil { os.Stdout.Close() <-waitChan } }
func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "usage: %s [options] <dataset>\n", os.Args[0]) flag.PrintDefaults() } spec.RegisterDatabaseFlags() flag.Parse() if flag.NArg() != 1 { util.CheckError(errors.New("expected dataset arg")) } ds, err := spec.GetDataset(flag.Arg(0)) util.CheckError(err) defer ds.Database().Close() newVal := uint64(1) if lastVal, ok := ds.MaybeHeadValue(); ok { newVal = uint64(lastVal.(types.Number)) + 1 } _, err = ds.Commit(types.Number(newVal)) d.Exp.NoError(err) fmt.Println(newVal) }
func main() { color = flag.Int("color", -1, "value of 1 forces color on, 2 forces color off") maxLines = flag.Int("max-lines", 10, "max number of lines to show per commit (-1 for all lines)") maxCommits = flag.Int("n", 0, "max number of commits to display (0 for all commits)") showHelp = flag.Bool("help", false, "show help text") showGraph = flag.Bool("graph", false, "show ascii-based commit hierarcy on left side of output") showValue = flag.Bool("show-value", false, "show commit value rather than diff information -- this is temporary") flag.Usage = func() { fmt.Fprintln(os.Stderr, "Displays the history of a Noms dataset\n") fmt.Fprintln(os.Stderr, "Usage: noms log <commitObject>") fmt.Fprintln(os.Stderr, "commitObject must be a dataset or object spec that refers to a commit.") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nSee \"Spelling Objects\" at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.\n\n") } flag.Parse() if *showHelp { flag.Usage() return } if len(flag.Args()) != 1 { util.CheckError(errors.New("expected exactly one argument")) } useColor = shouldUseColor() database, value, err := spec.GetPath(flag.Arg(0)) if err != nil { util.CheckErrorNoUsage(err) } defer database.Close() waitChan := outputpager.PageOutput(!*outputpager.NoPager) origCommit, ok := value.(types.Struct) if !ok || !origCommit.Type().Equals(datas.CommitType()) { util.CheckError(fmt.Errorf("%s does not reference a Commit object", flag.Arg(0))) } iter := NewCommitIterator(database, origCommit) displayed := 0 if *maxCommits <= 0 { *maxCommits = math.MaxInt32 } for ln, ok := iter.Next(); ok && displayed < *maxCommits; ln, ok = iter.Next() { if printCommit(ln, database) != nil { break } displayed++ } if waitChan != nil { os.Stdout.Close() <-waitChan } }
func main() { toDelete := flag.String("d", "", "dataset to delete") flag.Usage = func() { fmt.Fprintln(os.Stderr, "Noms dataset management\n") fmt.Fprintln(os.Stderr, "Usage: noms ds [<database> | -d <dataset>]") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nFor detailed information on spelling datastores and datasets, see: at https://github.com/attic-labs/noms/blob/master/doc/spelling.md.\n\n") } flag.Parse() if *toDelete != "" { set, err := spec.GetDataset(*toDelete) util.CheckError(err) oldCommitRef, errBool := set.MaybeHeadRef() if !errBool { util.CheckError(fmt.Errorf("Dataset %v not found", set.ID())) } store, err := set.Database().Delete(set.ID()) util.CheckError(err) defer store.Close() fmt.Printf("Deleted dataset %v (was %v)\n\n", set.ID(), oldCommitRef.TargetHash().String()) } else { if flag.NArg() != 1 { flag.Usage() return } store, err := spec.GetDatabase(flag.Arg(0)) util.CheckError(err) defer store.Close() store.Datasets().IterAll(func(k, v types.Value) { fmt.Println(k) }) } }
func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "Serves a Noms database over HTTP\n\n") fmt.Fprintf(os.Stderr, "Usage: noms serve <database>\n") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nFor detailed information on spelling databases, see: at https://github.com/attic-labs/noms/blob/master/doc/spelling.md.\n\n") } spec.RegisterDatabaseFlags() flag.Parse() if len(flag.Args()) != 1 { flag.Usage() return } spec, err := spec.ParseDatabaseSpec(flag.Arg(0)) util.CheckError(err) if spec.Protocol != "mem" && spec.Protocol != "ldb" { err := errors.New("Illegal database spec for server, must be 'mem' or 'ldb'") util.CheckError(err) } cs, err := spec.ChunkStore() util.CheckError(err) server := datas.NewRemoteDatabaseServer(cs, *port) // Shutdown server gracefully so that profile may be written c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) signal.Notify(c, syscall.SIGTERM) go func() { <-c server.Stop() }() d.Try(func() { defer profile.MaybeStartProfile().Stop() server.Run() }) }
func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "usage: %s <url> <dataset>\n", os.Args[0]) flag.PrintDefaults() } spec.RegisterDatabaseFlags() flag.Parse() if len(flag.Args()) != 2 { util.CheckError(errors.New("expected url and dataset flags")) } ds, err := spec.GetDataset(flag.Arg(1)) util.CheckError(err) url := flag.Arg(0) if url == "" { flag.Usage() } res, err := http.Get(url) if err != nil { log.Fatalf("Error fetching %s: %+v\n", url, err) } else if res.StatusCode != 200 { log.Fatalf("Error fetching %s: %s\n", url, res.Status) } defer res.Body.Close() var jsonObject interface{} err = json.NewDecoder(res.Body).Decode(&jsonObject) if err != nil { log.Fatalln("Error decoding JSON: ", err) } _, err = ds.Commit(util.NomsValueFromDecodedJSON(jsonObject, true)) d.Exp.NoError(err) ds.Database().Close() }
func main() { cpuCount := runtime.NumCPU() runtime.GOMAXPROCS(cpuCount) flag.Usage = func() { fmt.Fprintf(os.Stderr, "Moves datasets between or within databases\n\n") fmt.Fprintf(os.Stderr, "noms sync [options] <source-object> <dest-dataset>\n\n") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nFor detailed information on spelling objects and datasets, see: at https://github.com/attic-labs/noms/blob/master/doc/spelling.md.\n\n") } spec.RegisterDatabaseFlags() flag.Parse() if flag.NArg() != 2 { util.CheckError(errors.New("expected a source object and destination dataset")) } sourceStore, sourceObj, err := spec.GetPath(flag.Arg(0)) util.CheckError(err) defer sourceStore.Close() sinkDataset, err := spec.GetDataset(flag.Arg(1)) util.CheckError(err) defer sinkDataset.Database().Close() err = d.Try(func() { defer profile.MaybeStartProfile().Stop() var err error sinkDataset, err = sinkDataset.Pull(sourceStore, types.NewRef(sourceObj), int(*p)) d.Exp.NoError(err) }) if err != nil { log.Fatal(err) } }
func main() { flag.Usage = func() { fmt.Fprintln(os.Stderr, "Shows the difference between two objects\n") fmt.Fprintln(os.Stderr, "Usage: noms diff <object1> <object2>\n") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nSee \"Spelling Objects\" at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.\n\n") } flag.Parse() if *showHelp { flag.Usage() return } if len(flag.Args()) != 2 { util.CheckError(errors.New("expected exactly two arguments")) } db1, value1, err := spec.GetPath(flag.Arg(0)) util.CheckError(err) defer db1.Close() db2, value2, err := spec.GetPath(flag.Arg(1)) util.CheckError(err) defer db2.Close() waitChan := outputpager.PageOutput(!*outputpager.NoPager) diff.Diff(os.Stdout, value1, value2) fmt.Fprintf(os.Stdout, "\n") if waitChan != nil { os.Stdout.Close() <-waitChan } }
// csv-import: imports a CSV file into a dataset, as either a list or a map.
func main() {
	var (
		// The delimiter is a rune, which can be more than one byte long:
		// https://blog.golang.org/strings
		delimiter       = flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")
		header          = flag.String("header", "", "header row. If empty, we'll use the first row of the file")
		name            = flag.String("name", "Row", "struct name. The user-visible name to give to the struct type that will hold each row of data.")
		columnTypes     = flag.String("column-types", "", "a comma-separated list of types representing the desired type of each column. If absent, all types default to String")
		noProgress      = flag.Bool("no-progress", false, "prevents progress from being output if true")
		destType        = flag.String("dest-type", "list", "the destination type to import to. Can be 'list' or 'map:<pk>', where <pk> is the index position (0-based) of the column that is the unique identifier")
		destTypePattern = regexp.MustCompile(`^(list|map):(\d+)$`)
	)

	spec.RegisterDatabaseFlags()
	cpuCount := runtime.NumCPU()
	runtime.GOMAXPROCS(cpuCount)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: csv-import [options] <dataset> <csvfile>\n\n")
		flag.PrintDefaults()
	}

	flag.Parse()

	if flag.NArg() != 2 {
		err := fmt.Errorf("Expected exactly two parameters (dataset and path) after flags, but you have %d. Maybe you put a flag after the path?", flag.NArg())
		util.CheckError(err)
	}

	path := flag.Arg(1)

	defer profile.MaybeStartProfile().Stop()

	res, err := os.Open(path)
	d.Exp.NoError(err)
	defer res.Close()

	comma, err := csv.StringToRune(*delimiter)
	if err != nil {
		util.CheckError(err)
		return
	}

	// Decide whether rows go into a list or a map keyed by the <pk> column.
	var dest int
	var pk int
	if *destType == "list" {
		dest = destList
	} else if match := destTypePattern.FindStringSubmatch(*destType); match != nil {
		dest = destMap
		pk, err = strconv.Atoi(match[2])
		d.Chk.NoError(err)
	} else {
		fmt.Println("Invalid dest-type: ", *destType)
		return
	}

	fi, err := res.Stat()
	d.Chk.NoError(err)

	// Wrap the file in a progress reader unless progress output is disabled.
	var r io.Reader = res
	if !*noProgress {
		r = progressreader.New(r, getStatusPrinter(uint64(fi.Size())))
	}
	cr := csv.NewCSVReader(r, comma)

	var headers []string
	if *header == "" {
		headers, err = cr.Read()
		d.Exp.NoError(err)
	} else {
		headers = strings.Split(*header, string(comma))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	util.CheckError(err)
	defer ds.Database().Close()

	kinds := []types.NomsKind{}
	if *columnTypes != "" {
		kinds = csv.StringsToKinds(strings.Split(*columnTypes, ","))
	}

	var value types.Value
	if dest == destList {
		value, _ = csv.ReadToList(cr, *name, headers, kinds, ds.Database())
	} else {
		value = csv.ReadToMap(cr, headers, pk, kinds, ds.Database())
	}

	_, err = ds.Commit(value)
	if !*noProgress {
		status.Clear()
	}
	d.Exp.NoError(err)
}
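// destList and destMap are referenced above but not defined in the snippet.
// A minimal sketch of the assumed declarations:
const (
	destList = iota // import rows into a types.List
	destMap         // import rows into a types.Map keyed by the <pk> column
)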
func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "Fetches a URL into a noms blob\n\nUsage: %s <dataset> <url>:\n", os.Args[0]) flag.PrintDefaults() } spec.RegisterDatabaseFlags() flag.Parse() if flag.NArg() != 2 { util.CheckError(errors.New("expected dataset and url arguments")) } ds, err := spec.GetDataset(flag.Arg(0)) util.CheckError(err) defer ds.Database().Close() url := flag.Arg(1) start = time.Now() var pr io.Reader if strings.HasPrefix(url, "http") { resp, err := http.Get(url) if err != nil { fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %s\n", url, err) return } switch resp.StatusCode / 100 { case 4, 5: fmt.Fprintf(os.Stderr, "Could not fetch url %s, error: %d (%s)\n", url, resp.StatusCode, resp.Status) return } pr = progressreader.New(resp.Body, getStatusPrinter(resp.ContentLength)) } else { // assume it's a file f, err := os.Open(url) if err != nil { fmt.Fprintf(os.Stderr, "Invalid URL %s - does not start with 'http' and isn't local file either. fopen error: %s", url, err) return } s, err := f.Stat() if err != nil { fmt.Fprintf(os.Stderr, "Could not stat file %s: %s", url, err) return } pr = progressreader.New(f, getStatusPrinter(s.Size())) } b := types.NewBlob(pr) ds, err = ds.Commit(b) if err != nil { d.Chk.True(datas.ErrMergeNeeded == err) fmt.Fprintf(os.Stderr, "Could not commit, optimistic concurrency failed.") return } status.Done() fmt.Println("Done") }
// xml-import: walks a directory of .xml files, converts each to a Noms value in
// parallel, and commits a list of refs to those values.
func main() {
	err := d.Try(func() {
		spec.RegisterDatabaseFlags()
		flag.Usage = customUsage
		flag.Parse()

		if flag.NArg() != 2 {
			util.CheckError(errors.New("Expected dataset followed by directory path"))
		}
		dir := flag.Arg(1)

		ds, err := spec.GetDataset(flag.Arg(0))
		util.CheckError(err)

		defer profile.MaybeStartProfile().Stop()

		cpuCount := runtime.NumCPU()
		runtime.GOMAXPROCS(cpuCount)

		filesChan := make(chan fileIndex, 1024)
		refsChan := make(chan refIndex, 1024)

		// Producer: walk the directory and send each .xml file path to filesChan.
		getFilePaths := func() {
			index := 0
			err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
				d.Exp.NoError(err, "Cannot traverse directories")
				if !info.IsDir() && filepath.Ext(path) == ".xml" {
					filesChan <- fileIndex{path, index}
					index++
				}
				return nil
			})
			d.Exp.NoError(err)
			close(filesChan)
		}

		// Consumers: decode each XML file, convert it to a Noms value, write it,
		// and send the resulting ref (tagged with its original index) to refsChan.
		wg := sync.WaitGroup{}
		importXML := func() {
			expectedType := types.NewMap()
			for f := range filesChan {
				file, err := os.Open(f.path)
				d.Exp.NoError(err, "Error getting XML")

				xmlObject, err := mxj.NewMapXmlReader(file)
				d.Exp.NoError(err, "Error decoding XML")
				object := xmlObject.Old()
				file.Close()

				nomsObj := util.NomsValueFromDecodedJSON(object, false)
				d.Chk.IsType(expectedType, nomsObj)

				var r types.Ref
				if !*noIO {
					r = ds.Database().WriteValue(nomsObj)
				}

				refsChan <- refIndex{r, f.index}
			}
			wg.Done()
		}

		go getFilePaths()
		for i := 0; i < cpuCount*8; i++ {
			wg.Add(1)
			go importXML()
		}

		go func() {
			wg.Wait()
			close(refsChan) // done converting xml to noms
		}()

		// Collect the refs, restore the original file order, and commit the list.
		refList := refIndexList{}
		for r := range refsChan {
			refList = append(refList, r)
		}
		sort.Sort(refList)

		refs := make([]types.Value, len(refList))
		for idx, r := range refList {
			refs[idx] = r.ref
		}

		rl := types.NewList(refs...)

		if !*noIO {
			_, err := ds.Commit(rl)
			d.Exp.NoError(err)
		}
	})
	if err != nil {
		log.Fatal(err)
	}
}
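// The xml importer above relies on a few small helper types that are not shown
// in the snippet. A plausible sketch, assuming refIndexList sorts by the
// original file index so the committed list preserves directory order:
type fileIndex struct {
	path  string
	index int
}

type refIndex struct {
	ref   types.Ref
	index int
}

type refIndexList []refIndex

func (a refIndexList) Len() int           { return len(a) }
func (a refIndexList) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a refIndexList) Less(i, j int) bool { return a[i].index < a[j].index }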