func TestParseError(t *testing.T) { assert := assert.New(t) assertParseError := func(s string) { e := d.Try(func() { Parse(s) }) _, ok := e.(d.WrappedError) assert.True(ok) } assertParseError("foo") // too few digits assertParseError("0000000000000000000000000000000") // too many digits assertParseError("000000000000000000000000000000000") // 'w' not valid base32 assertParseError("00000000000000000000000000000000w") // no prefix assertParseError("sha1-00000000000000000000000000000000") assertParseError("sha2-00000000000000000000000000000000") r := Parse("00000000000000000000000000000000") assert.NotNil(r) }
// main exports the head value of a dataset as CSV to stdout.
// NOTE(review): `delimiter` is read via `*delimiter` but not declared here —
// presumably a package-level flag; confirm against the rest of the file.
func main() {
	spec.RegisterDatabaseFlags()
	// Use all available cores.
	cpuCount := runtime.NumCPU()
	runtime.GOMAXPROCS(cpuCount)

	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Usage: csv-export [options] dataset > filename")
		flag.PrintDefaults()
	}
	flag.Parse()

	if flag.NArg() != 1 {
		// CheckError reports the usage error and exits.
		util.CheckError(errors.New("expected dataset arg"))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	util.CheckError(err)
	defer ds.Database().Close()

	// Validate that the delimiter flag resolves to a single rune.
	comma, err := csv.StringToRune(*delimiter)
	util.CheckError(err)

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()
		// Convert the dataset head into a list plus element descriptor,
		// then stream it to stdout as CSV.
		nomsList, structDesc := csv.ValueToListAndElemDesc(ds.HeadValue(), ds.Database())
		csv.Write(nomsList, structDesc, comma, os.Stdout)
	})
	if err != nil {
		// Failures are reported to stdout; the process still exits 0.
		fmt.Println("Failed to export dataset as CSV:")
		fmt.Println(err)
	}
}
func (suite *FileTestSuite) TestNoCopyDir() { dir, err := ioutil.TempDir(suite.dir, "") suite.NoError(err) dst := filepath.Join(suite.dir, "dst") suite.Error(d.Try(func() { DumbCopy(dir, dst) })) }
func HandleGetRefs(w http.ResponseWriter, req *http.Request, ps URLParams, cs chunks.ChunkStore) { err := d.Try(func() { d.Exp.Equal("POST", req.Method) hashes := extractHashes(req) w.Header().Add("Content-Type", "application/octet-stream") writer := respWriter(req, w) defer writer.Close() sz := chunks.NewSerializer(writer) for _, h := range hashes { c := cs.Get(h) if !c.IsEmpty() { sz.Put(c) } } sz.Close() }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func (spec databaseSpec) Database() (ds datas.Database, err error) { switch spec.Protocol { case "http", "https": err = d.Unwrap(d.Try(func() { ds = datas.NewRemoteDatabase(spec.String(), "Bearer "+spec.accessToken) })) case "ldb": err = d.Unwrap(d.Try(func() { ds = datas.NewDatabase(getLDBStore(spec.Path)) })) case "mem": ds = datas.NewDatabase(chunks.NewMemoryStore()) default: err = fmt.Errorf("Invalid path prototocol: %s", spec.Protocol) } return }
func (suite *ParsedResultTestSuite) assertTypes(source string, ts ...*types.Type) { err := d.Try(func() { i := runParser("", strings.NewReader(source)) for idx, t := range i.Types { // Cannot use t.Equals here since the parser generates a Type that cannot be serialized. suite.Equal(ts[idx], t) } }) suite.NoError(err, source) }
func HandleRootGet(w http.ResponseWriter, req *http.Request, ps URLParams, rt chunks.ChunkStore) { err := d.Try(func() { d.Exp.Equal("GET", req.Method) rootRef := rt.Root() fmt.Fprintf(w, "%v", rootRef.String()) w.Header().Add("content-type", "text/plain") }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
// writeMetaLines renders the commit's meta struct (when present) as one
// "Label:  value" line per field, writing through a maxLineWriter that caps
// output at maxLines and prefixes lines with the commit graph when enabled.
// It returns the updated line count and any error raised while writing.
func writeMetaLines(node LogNode, maxLines, lineno, maxLabelLen int, w io.Writer) (int, error) {
	if m, ok := node.commit.MaybeGet(datas.MetaField); ok {
		meta := m.(types.Struct)
		// The writer starts needing a prefix so the first meta line is
		// aligned with the graph drawing like every other line.
		mlw := &maxLineWriter{numLines: lineno, maxLines: maxLines, node: node, dest: w, needsPrefix: true, showGraph: showGraph}
		err := d.Try(func() {
			// Iterate the struct's declared fields in type order.
			meta.Type().Desc.(types.StructDesc).IterFields(func(fieldName string, t *types.Type) {
				v := meta.Get(fieldName)
				// Left-pad the title-cased label (+2 for ": " spacing) so
				// values line up in a column of width maxLabelLen.
				fmt.Fprintf(mlw, "%-*s", maxLabelLen+2, strings.Title(fieldName)+":")
				types.WriteEncodedValue(mlw, v)
				fmt.Fprintf(mlw, "\n")
			})
		})
		return mlw.numLines, err
	}
	// No meta on this commit: nothing written, line count unchanged.
	return lineno, nil
}
// HandleWriteValue accepts a POSTed stream of serialized hints followed by
// chunks, validates and enqueues each chunk into the store, and — when the
// sink signals backpressure — responds with the set of hashes the client must
// resubmit. On success it responds 201 Created.
func HandleWriteValue(w http.ResponseWriter, req *http.Request, ps URLParams, cs chunks.ChunkStore) {
	hashes := hash.HashSlice{}
	err := d.Try(func() {
		d.Exp.Equal("POST", req.Method)

		reader := bodyReader(req)
		defer func() {
			// Ensure all data on reader is consumed
			io.Copy(ioutil.Discard, reader)
			reader.Close()
		}()
		vbs := types.NewValidatingBatchingSink(cs)
		vbs.Prepare(deserializeHints(reader))

		// Chunks are deserialized concurrently and delivered on chunkChan.
		chunkChan := make(chan *chunks.Chunk, 16)
		go chunks.DeserializeToChan(reader, chunkChan)
		var bpe chunks.BackpressureError
		for c := range chunkChan {
			if bpe == nil {
				bpe = vbs.Enqueue(*c)
			} else {
				// Once Enqueue has signalled backpressure, accumulate the
				// remaining hashes into the backpressure set instead.
				bpe = append(bpe, c.Hash())
			}
			// If a previous Enqueue() errored, we still need to drain chunkChan
			// TODO: what about having DeserializeToChan take a 'done' channel to stop it?
			hashes = append(hashes, c.Hash())
		}
		if bpe == nil {
			// Flush may itself report backpressure for buffered chunks.
			bpe = vbs.Flush()
		}
		if bpe != nil {
			// Tell the client which chunks to retry.
			w.WriteHeader(httpStatusTooManyRequests)
			// NOTE(review): this header is added after WriteHeader and is
			// likely ignored by net/http — verify and move it up if so.
			w.Header().Add("Content-Type", "application/octet-stream")
			writer := respWriter(req, w)
			defer writer.Close()
			serializeHashes(writer, bpe.AsHashes())
			return
		}
		w.WriteHeader(http.StatusCreated)
	})
	if err != nil {
		http.Error(w, fmt.Sprintf("Error: %v\nChunks in payload: %v", err, hashes), http.StatusBadRequest)
		return
	}
}
func versionCheck(hndlr Handler) Handler { return func(w http.ResponseWriter, req *http.Request, ps URLParams, cs chunks.ChunkStore) { w.Header().Set(NomsVersionHeader, constants.NomsVersion) if req.Header.Get(NomsVersionHeader) != constants.NomsVersion { http.Error( w, fmt.Sprintf("Error: SDK version %s is incompatible with data of version %s", req.Header.Get(NomsVersionHeader), constants.NomsVersion), http.StatusBadRequest, ) return } err := d.Try(func() { hndlr(w, req, ps, cs) }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } } }
func HandleHasRefs(w http.ResponseWriter, req *http.Request, ps URLParams, cs chunks.ChunkStore) { err := d.Try(func() { d.Exp.Equal("POST", req.Method) hashes := extractHashes(req) w.Header().Add("Content-Type", "text/plain") writer := respWriter(req, w) defer writer.Close() for _, h := range hashes { fmt.Fprintf(writer, "%s %t\n", h, cs.Has(h)) } }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func runServe(args []string) int { cs, err := spec.GetChunkStore(args[0]) d.CheckError(err) server := datas.NewRemoteDatabaseServer(cs, port) // Shutdown server gracefully so that profile may be written c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) signal.Notify(c, syscall.SIGTERM) go func() { <-c server.Stop() }() d.Try(func() { defer profile.MaybeStartProfile().Stop() server.Run() }) return 0 }
func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "Serves a Noms database over HTTP\n\n") fmt.Fprintf(os.Stderr, "Usage: noms serve <database>\n") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nFor detailed information on spelling databases, see: at https://github.com/attic-labs/noms/blob/master/doc/spelling.md.\n\n") } spec.RegisterDatabaseFlags() flag.Parse() if len(flag.Args()) != 1 { flag.Usage() return } spec, err := spec.ParseDatabaseSpec(flag.Arg(0)) util.CheckError(err) if spec.Protocol != "mem" && spec.Protocol != "ldb" { err := errors.New("Illegal database spec for server, must be 'mem' or 'ldb'") util.CheckError(err) } cs, err := spec.ChunkStore() util.CheckError(err) server := datas.NewRemoteDatabaseServer(cs, *port) // Shutdown server gracefully so that profile may be written c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) signal.Notify(c, syscall.SIGTERM) go func() { <-c server.Stop() }() d.Try(func() { defer profile.MaybeStartProfile().Stop() server.Run() }) }
func Diff(w io.Writer, v1, v2 types.Value) (err error) { dq := NewDiffQueue() di := diffInfo{path: types.NewPath().AddField("/"), v1: v1, v2: v2} dq.PushBack(di) err = d.Try(func() { for di, ok := dq.PopFront(); ok; di, ok = dq.PopFront() { p, key, v1, v2 := di.path, di.key, di.v1, di.v2 v1.Type().Kind() if v1 == nil && v2 != nil { line(w, addPrefix, key, v2) } if v1 != nil && v2 == nil { line(w, subPrefix, key, v1) } if !v1.Equals(v2) { if !canCompare(v1, v2) { line(w, subPrefix, key, v1) line(w, addPrefix, key, v2) } else { switch v1.Type().Kind() { case types.ListKind: diffLists(dq, w, p, v1.(types.List), v2.(types.List)) case types.MapKind: diffMaps(dq, w, p, v1.(types.Map), v2.(types.Map)) case types.SetKind: diffSets(dq, w, p, v1.(types.Set), v2.(types.Set)) case types.StructKind: diffStructs(dq, w, p, v1.(types.Struct), v2.(types.Struct)) default: panic("Unrecognized type in diff function") } } } } }) return }
// main stages a Noms build: it ensures the staging directory (first argument)
// exists, then runs the build script followed by the stage script with
// PYTHONPATH pointing at the checkout's tools directory.
func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage of %s:\n %s path/to/staging/dir\n", os.Args[0], os.Args[0])
	}
	flag.Parse(true)
	if flag.Arg(0) == "" {
		flag.Usage()
		os.Exit(1)
	}
	err := d.Try(func() {
		stagingDir, err := filepath.Abs(flag.Arg(0))
		d.PanicIfTrue(err != nil, "Path to staging directory (first arg) must be valid, not %s", flag.Arg(0))
		d.PanicIfError(os.MkdirAll(stagingDir, 0755))

		goPath := os.Getenv("GOPATH")
		d.PanicIfTrue(goPath == "", "GOPATH must be set!")
		workspace := os.Getenv("WORKSPACE")
		if workspace == "" {
			fmt.Printf("WORKSPACE not set in environment; using GOPATH (%s).\n", goPath)
			workspace = goPath
		}
		// NOTE(review): `workspace` is computed but never used after this
		// point — possibly vestigial; confirm before removing.
		pythonPath := filepath.Join(goPath, nomsCheckoutPath, "tools")
		env := runner.Env{
			"PYTHONPATH": pythonPath,
		}

		// Run the build, then the stage script; each failure exits non-zero.
		if !runner.Serial(os.Stdout, os.Stderr, env, ".", buildScript) {
			os.Exit(1)
		}

		if !runner.Serial(os.Stdout, os.Stderr, env, ".", stageScript, stagingDir) {
			os.Exit(1)
		}
	})
	if err != nil {
		log.Fatal(err)
	}
}
// main pulls a source object into a destination dataset, syncing between or
// within databases.
func main() {
	// Use all available cores.
	cpuCount := runtime.NumCPU()
	runtime.GOMAXPROCS(cpuCount)

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Moves datasets between or within databases\n\n")
		fmt.Fprintf(os.Stderr, "noms sync [options] <source-object> <dest-dataset>\n\n")
		flag.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nFor detailed information on spelling objects and datasets, see: at https://github.com/attic-labs/noms/blob/master/doc/spelling.md.\n\n")
	}

	spec.RegisterDatabaseFlags()
	flag.Parse()

	if flag.NArg() != 2 {
		util.CheckError(errors.New("expected a source object and destination dataset"))
	}

	sourceStore, sourceObj, err := spec.GetPath(flag.Arg(0))
	util.CheckError(err)
	defer sourceStore.Close()

	sinkDataset, err := spec.GetDataset(flag.Arg(1))
	util.CheckError(err)
	defer sinkDataset.Database().Close()

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()
		var err error
		// NOTE(review): `p` is presumably a package-level parallelism flag
		// passed as the pull concurrency — confirm against the file.
		sinkDataset, err = sinkDataset.Pull(sourceStore, types.NewRef(sourceObj), int(*p))
		d.Exp.NoError(err)
	})

	if err != nil {
		log.Fatal(err)
	}
}
// main exports the head value of a dataset as CSV to stdout.
func main() {
	// Actually the delimiter uses runes, which can be multiple characters long.
	// https://blog.golang.org/strings
	delimiter := flag.String("delimiter", ",", "field delimiter for csv file, must be exactly one character long.")
	spec.RegisterDatabaseFlags(flag.CommandLine)
	profile.RegisterProfileFlags(flag.CommandLine)

	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Usage: csv-export [options] dataset > filename")
		flag.PrintDefaults()
	}
	flag.Parse(true)

	if flag.NArg() != 1 {
		// CheckError reports the usage error and exits.
		d.CheckError(errors.New("expected dataset arg"))
	}

	ds, err := spec.GetDataset(flag.Arg(0))
	d.CheckError(err)
	defer ds.Database().Close()

	// Validate that the delimiter flag resolves to a single rune.
	comma, err := csv.StringToRune(*delimiter)
	d.CheckError(err)

	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()
		// Convert the dataset head into a list plus element descriptor,
		// then stream it to stdout as CSV.
		nomsList, structDesc := csv.ValueToListAndElemDesc(ds.HeadValue(), ds.Database())
		csv.Write(nomsList, structDesc, comma, os.Stdout)
	})
	if err != nil {
		// Failures are reported to stdout; the process still exits 0.
		fmt.Println("Failed to export dataset as CSV:")
		fmt.Println(err)
	}
}
func HandleRootPost(w http.ResponseWriter, req *http.Request, ps URLParams, rt chunks.ChunkStore) { err := d.Try(func() { d.Exp.Equal("POST", req.Method) params := req.URL.Query() tokens := params["last"] d.Exp.Len(tokens, 1) last := hash.Parse(tokens[0]) tokens = params["current"] d.Exp.Len(tokens, 1) current := hash.Parse(tokens[0]) if !rt.UpdateRoot(current, last) { w.WriteHeader(http.StatusConflict) return } }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func TestParseError(t *testing.T) { assert := assert.New(t) assertParseError := func(s string) { e := d.Try(func() { Parse(s) }) assert.IsType(d.UsageError{}, e) } assertParseError("foo") assertParseError("sha1") assertParseError("sha1-0") // too many digits assertParseError("sha1-00000000000000000000000000000000000000000") // 'g' not valid hex assertParseError("sha1-000000000000000000000000000000000000000g") // sha2 not supported assertParseError("sha2-0000000000000000000000000000000000000000") r := Parse("sha1-0000000000000000000000000000000000000000") assert.NotNil(r) }
// main imports a directory tree of XML files into a Noms dataset: every .xml
// file under the directory (first arg) is decoded, converted to a Noms value,
// written to the database, and the resulting refs are committed as one list
// ordered by file-discovery order.
func main() {
	err := d.Try(func() {
		spec.RegisterDatabaseFlags(flag.CommandLine)
		profile.RegisterProfileFlags(flag.CommandLine)
		flag.Usage = customUsage
		flag.Parse(true)

		if flag.NArg() != 2 {
			d.CheckError(errors.New("Expected directory path followed by dataset"))
		}
		dir := flag.Arg(0)
		ds, err := spec.GetDataset(flag.Arg(1))
		d.CheckError(err)

		defer profile.MaybeStartProfile().Stop()

		cpuCount := runtime.NumCPU()

		// Pipeline: getFilePaths -> filesChan -> importXML workers -> refsChan.
		filesChan := make(chan fileIndex, 1024)
		refsChan := make(chan refIndex, 1024)

		// Producer: walk dir and enqueue each .xml file tagged with its
		// ordinal index so output order can be reconstructed later.
		getFilePaths := func() {
			index := 0
			err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
				d.PanicIfTrue(err != nil, "Cannot traverse directories")
				if !info.IsDir() && filepath.Ext(path) == ".xml" {
					filesChan <- fileIndex{path, index}
					index++
				}

				return nil
			})
			d.PanicIfError(err)
			close(filesChan)
		}

		wg := sync.WaitGroup{}
		// Worker: decode each XML file, convert it to a Noms value,
		// optionally write it, and pass the (ref, index) pair downstream.
		importXML := func() {
			// Used only as a type witness for the IsType check below.
			expectedType := types.NewMap()
			for f := range filesChan {
				file, err := os.Open(f.path)
				d.PanicIfTrue(err != nil, "Error getting XML")

				xmlObject, err := mxj.NewMapXmlReader(file)
				d.PanicIfTrue(err != nil, "Error decoding XML")
				object := xmlObject.Old()
				file.Close()

				nomsObj := jsontonoms.NomsValueFromDecodedJSON(object, false)
				// Each imported value is expected to decode to a Noms Map.
				d.Chk.IsType(expectedType, nomsObj)

				var r types.Ref
				// -noIO skips database writes (dry run); r stays zero.
				if !*noIO {
					r = ds.Database().WriteValue(nomsObj)
				}

				refsChan <- refIndex{r, f.index}
			}

			wg.Done()
		}

		go getFilePaths()
		// Oversubscribe workers (8x cores) since they block on file I/O.
		for i := 0; i < cpuCount*8; i++ {
			wg.Add(1)
			go importXML()
		}
		go func() {
			wg.Wait()
			close(refsChan) // done converting xml to noms
		}()

		// Collect all refs, then re-sort by original file index so the
		// committed list order is deterministic regardless of scheduling.
		refList := refIndexList{}
		for r := range refsChan {
			refList = append(refList, r)
		}
		sort.Sort(refList)
		refs := make([]types.Value, len(refList))
		for idx, r := range refList {
			refs[idx] = r.ref
		}

		rl := types.NewList(refs...)

		if !*noIO {
			_, err := ds.CommitValue(rl)
			d.PanicIfError(err)
		}
	})
	if err != nil {
		log.Fatal(err)
	}
}
// Diff writes a textual description of the differences between v1 and v2 to
// w, starting the traversal at an empty path. Panics raised while diffing are
// converted to the returned error.
func Diff(w io.Writer, v1, v2 types.Value) (err error) {
	err = d.Try(func() {
		diff(w, types.NewPath(), nil, v1, v2)
	})
	return
}