func main() { runtime.GOMAXPROCS(runtime.NumCPU()) flags := datas.NewFlags() flag.Parse() ds, ok := flags.CreateDataStore() if !ok || *inputID == "" || *outputID == "" { flag.Usage() return } defer ds.Close() err := d.Try(func() { if util.MaybeStartCPUProfile() { defer util.StopCPUProfile() } inputDataset := dataset.NewDataset(ds, *inputID) outputDataset := dataset.NewDataset(ds, *outputID) input := inputDataset.Head().Value().(ListOfRefOfMapOfStringToValue) output := getIndex(input, ds) _, err := outputDataset.Commit(output) d.Exp.NoError(err) util.MaybeWriteMemProfile() }) if err != nil { log.Fatal(err) } }
func HandleGetHasRefs(w http.ResponseWriter, req *http.Request, ps URLParams, ds DataStore) { err := d.Try(func() { d.Exp.Equal("POST", req.Method) req.ParseForm() refStrs := req.PostForm["ref"] d.Exp.True(len(refStrs) > 0) refs := make([]ref.Ref, len(refStrs)) for idx, refStr := range refStrs { refs[idx] = ref.Parse(refStr) } w.Header().Add("Content-Type", "text/plain") writer := w.(io.Writer) if strings.Contains(req.Header.Get("Accept-Encoding"), "gzip") { w.Header().Add("Content-Encoding", "gzip") gw := gzip.NewWriter(w) defer gw.Close() writer = gw } sz := chunks.NewSerializer(writer) for _, r := range refs { has := ds.transitionalChunkStore().Has(r) fmt.Fprintf(writer, "%s %t\n", r, has) } sz.Close() }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func main() { cpuCount := runtime.NumCPU() runtime.GOMAXPROCS(cpuCount) flag.Usage = func() { fmt.Println("Usage: csv_exporter [options] > filename") flag.PrintDefaults() } flag.Parse() ds := dsFlags.CreateDataset() if ds == nil { flag.Usage() return } defer ds.Store().Close() comma, err := csv.StringToRune(*delimiter) if err != nil { fmt.Println(err.Error()) flag.Usage() return } err = d.Try(func() { nomsList, structDesc := csv.ValueToListAndElemDesc(ds.Head().Value(), ds.Store()) csv.Write(nomsList, structDesc, comma, os.Stdout) }) if err != nil { fmt.Println("Failed to export dataset as CSV:") fmt.Println(err) } }
func HandleRootGet(w http.ResponseWriter, req *http.Request, ps URLParams, ds DataStore) { err := d.Try(func() { d.Exp.Equal("GET", req.Method) rootRef := ds.transitionalChunkStore().Root() fmt.Fprintf(w, "%v", rootRef.String()) w.Header().Add("content-type", "text/plain") }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func main() { cpuCount := runtime.NumCPU() runtime.GOMAXPROCS(cpuCount) flag.Parse() sourceStore, ok := sourceStoreFlags.CreateDataStore() sink := sinkDsFlags.CreateDataset() if !ok || sink == nil || *p == 0 || *sourceObject == "" { flag.Usage() return } defer sourceStore.Close() defer sink.Store().Close() err := d.Try(func() { if util.MaybeStartCPUProfile() { defer util.StopCPUProfile() } sourceRef := ref.Ref{} if r, ok := ref.MaybeParse(*sourceObject); ok { if sourceStore.Has(r) { sourceRef = r } } else { if c, ok := sourceStore.MaybeHead(*sourceObject); ok { sourceRef = c.Ref() } } d.Exp.False(sourceRef.IsEmpty(), "Unknown source object: %s", *sourceObject) var err error *sink, err = sink.Pull(sourceStore, sourceRef, int(*p)) util.MaybeWriteMemProfile() d.Exp.NoError(err) }) if err != nil { log.Fatal(err) } }
func HandlePostRefs(w http.ResponseWriter, req *http.Request, ps URLParams, ds DataStore) { err := d.Try(func() { d.Exp.Equal("POST", req.Method) var reader io.Reader = req.Body if strings.Contains(req.Header.Get("Content-Encoding"), "gzip") { gr, err := gzip.NewReader(reader) d.Exp.NoError(err) defer gr.Close() reader = gr } chunks.Deserialize(reader, ds.transitionalChunkStore(), nil) w.WriteHeader(http.StatusCreated) }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func HandleRef(w http.ResponseWriter, req *http.Request, ps URLParams, ds DataStore) { err := d.Try(func() { d.Exp.Equal("GET", req.Method) r := ref.Parse(ps.ByName("ref")) chunk := ds.transitionalChunkStore().Get(r) if chunk.IsEmpty() { w.WriteHeader(http.StatusNotFound) return } _, err := io.Copy(w, bytes.NewReader(chunk.Data())) d.Chk.NoError(err) w.Header().Add("Content-Type", "application/octet-stream") w.Header().Add("Cache-Control", "max-age=31536000") // 1 year }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "Usage of %s:\n %s path/to/staging/dir\n", os.Args[0], os.Args[0]) } flag.Parse() if flag.Arg(0) == "" { flag.Usage() os.Exit(1) } err := d.Try(func() { stagingDir, err := filepath.Abs(flag.Arg(0)) d.Exp.NoError(err, "Path to staging directory (first arg) must be valid, not %s", flag.Arg(0)) d.Exp.NoError(os.MkdirAll(stagingDir, 0755)) goPath := os.Getenv("GOPATH") d.Exp.NotEmpty(goPath, "GOPATH must be set!") workspace := os.Getenv("WORKSPACE") if workspace == "" { fmt.Printf("WORKSPACE not set in environment; using GOPATH (%s).\n", goPath) workspace = goPath } pythonPath := filepath.Join(goPath, nomsCheckoutPath, "tools") env := runner.Env{ "PYTHONPATH": pythonPath, } if !runner.Serial(os.Stdout, os.Stderr, env, ".", buildScript) { os.Exit(1) } if !runner.Serial(os.Stdout, os.Stderr, env, ".", stageScript, stagingDir) { os.Exit(1) } }) if err != nil { log.Fatal(err) } }
func HandleRootPost(w http.ResponseWriter, req *http.Request, ps URLParams, ds DataStore) { err := d.Try(func() { d.Exp.Equal("POST", req.Method) params := req.URL.Query() tokens := params["last"] d.Exp.Len(tokens, 1) last := ref.Parse(tokens[0]) tokens = params["current"] d.Exp.Len(tokens, 1) current := ref.Parse(tokens[0]) if !ds.transitionalChunkStore().UpdateRoot(current, last) { w.WriteHeader(http.StatusConflict) return } }) if err != nil { http.Error(w, fmt.Sprintf("Error: %v", err), http.StatusBadRequest) return } }
func TestParseError(t *testing.T) { assert := assert.New(t) assertParseError := func(s string) { e := d.Try(func() { Parse(s) }) assert.IsType(d.UsageError{}, e) } assertParseError("foo") assertParseError("sha1") assertParseError("sha1-0") // too many digits assertParseError("sha1-00000000000000000000000000000000000000000") // 'g' not valid hex assertParseError("sha1-000000000000000000000000000000000000000g") // sha2 not supported assertParseError("sha2-0000000000000000000000000000000000000000") r := Parse("sha1-0000000000000000000000000000000000000000") assert.NotNil(r) }
func resolveImports(aliases map[string]string, includePath string, vrw types.ValueReadWriter) map[string]ref.Ref { canonicalize := func(path string) string { if filepath.IsAbs(path) { return path } return filepath.Join(includePath, path) } imports := map[string]ref.Ref{} for alias, target := range aliases { var r ref.Ref if d.Try(func() { r = ref.Parse(target) }) != nil { canonical := canonicalize(target) inFile, err := os.Open(canonical) d.Chk.NoError(err) defer inFile.Close() parsedDep := ParseNomDL(alias, inFile, filepath.Dir(canonical), vrw) imports[alias] = vrw.WriteValue(parsedDep.Package).TargetRef() } else { imports[alias] = r } } return imports }
// main builds a quadtree over every geo-positioned value reachable from
// -input-ref and commits the resulting tree to -output-ds. A producer
// goroutine walks the value graph and streams NodeDefs over a channel to
// the consuming append loop below.
func main() {
	runtime.GOMAXPROCS(runtime.NumCPU())
	flag.Usage = func() {
		fmt.Printf("Usage: %s -ldb=/path/to/db -input-ref=sha1-xyz -output-ds=quadtree\n\n", os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse()
	start := time.Now()
	datastore, ok := datasFlags.CreateDataStore()
	if !ok || *inputRefStr == "" || *outputDs == "" {
		flag.Usage()
		return
	}
	defer datastore.Close()
	dataset := dataset.NewDataset(datastore, *outputDs)
	var inputRef ref.Ref
	err := d.Try(func() { inputRef = ref.Parse(*inputRefStr) })
	if err != nil {
		log.Fatalf("Invalid inputRef: %v", *inputRefStr)
	}
	// Root bounding box — coordinates cover the San Francisco area.
	gr := common.GeorectangleDef{
		TopLeft:     common.GeopositionDef{Latitude: 37.83, Longitude: -122.52},
		BottomRight: common.GeopositionDef{Latitude: 37.70, Longitude: -122.36},
	}
	qtRoot := common.QuadTreeDef{
		Nodes:          common.ListOfNodeDef{},
		Tiles:          common.MapOfStringToQuadTreeDef{},
		Depth:          0,
		NumDescendents: 0,
		Path:           "",
		Georectangle:   gr,
	}
	if !*quietFlag {
		fmt.Printf("quadTreeRoot: %+v\n", qtRoot.Georectangle)
	}
	nChan := make(chan *common.NodeDef, 1024)
	nodesConverted := uint32(0)
	type hasGeoposition interface {
		Geoposition() common.Geoposition
	}
	// Producer: walk the graph with 64 parallel workers, extracting a
	// Geoposition from values that expose one (either via the interface or
	// via a "geo" struct field), and send a NodeDef for each.
	go func() {
		ds := dataset.Store()
		walk.SomeP(ds.ReadValue(inputRef), ds, func(v types.Value) (stop bool) {
			var g common.Geoposition
			switch v := v.(type) {
			case hasGeoposition:
				g = v.Geoposition()
			case types.Struct:
				// The inner break exits the switch once g is populated; a
				// Struct without a usable "geo" field falls through with g
				// left at its zero value.
				if mg, ok := v.MaybeGet("geo"); ok {
					if mg, ok := mg.(common.Geoposition); ok {
						g = mg
						break
					}
				}
			default:
				return
			}
			// TODO: This check is mega bummer. We really only want to consider RefOfStruct, but it's complicated to filter the case of an inline struct out.
			if !ds.Has(v.Ref()) {
				return
			}
			stop = true
			nodeDef := &common.NodeDef{Geoposition: g.Def(), Reference: v.Ref()}
			nChan <- nodeDef
			nConverted := atomic.AddUint32(&nodesConverted, 1)
			if !*quietFlag && nConverted%1e5 == 0 {
				fmt.Printf("Nodes Converted: %d, elapsed time: %.2f secs\n", nodesConverted, SecsSince(start))
			}
			return
		}, 64)
		// Closing nChan ends the consumer's range loop below.
		close(nChan)
	}()
	// Consumer: append nodes to the in-memory quadtree as they arrive.
	nodesAppended := uint32(0)
	for nodeDef := range nChan {
		qtRoot.Append(nodeDef)
		nodesAppended++
		if !*quietFlag && nodesAppended%1e5 == 0 {
			fmt.Printf("Nodes Appended: %d, elapsed time: %.2f secs\n", nodesAppended, SecsSince(start))
			qtRoot.Analyze()
		}
	}
	if !*quietFlag {
		fmt.Printf("Nodes Appended: %d, elapsed time: %.2f secs\n", nodesAppended, SecsSince(start))
		qtRoot.Analyze()
		fmt.Printf("Calling SaveToNoms(), elapsed time: %.2f secs\n", SecsSince(start))
	}
	nomsQtRoot := qtRoot.SaveToNoms(dataset.Store(), start, *quietFlag)
	if !*quietFlag {
		fmt.Printf("Calling Commit(), elapsed time: %.2f secs\n", SecsSince(start))
	}
	_, err = dataset.Commit(types.NewRef(nomsQtRoot.Ref()))
	d.Exp.NoError(err)
	if !*quietFlag {
		fmt.Printf("Commit completed, elapsed time: %.2f secs\n", time.Now().Sub(start).Seconds())
	}
	fmt.Println("QuadTree ref:", nomsQtRoot.Ref())
}
// main imports a directory tree of XML files into a dataset: each .xml file
// is decoded into a noms map and written to the store, and the refs are
// committed as a single list in file-discovery order.
func main() {
	err := d.Try(func() {
		dsFlags := dataset.NewFlags()
		flag.Usage = customUsage
		flag.Parse()
		ds := dsFlags.CreateDataset()
		dir := flag.Arg(0)
		if ds == nil || dir == "" {
			flag.Usage()
			return
		}
		defer ds.Store().Close()
		if util.MaybeStartCPUProfile() {
			defer util.StopCPUProfile()
		}
		cpuCount := runtime.NumCPU()
		runtime.GOMAXPROCS(cpuCount)
		filesChan := make(chan fileIndex, 1024)
		refsChan := make(chan refIndex, 1024)
		// Producer: walk dir, enqueueing each .xml path with its discovery
		// index (used later to restore a deterministic ordering).
		getFilePaths := func() {
			index := 0
			err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
				d.Exp.NoError(err, "Cannot traverse directories")
				if !info.IsDir() && filepath.Ext(path) == ".xml" {
					filesChan <- fileIndex{path, index}
					index++
				}
				return nil
			})
			d.Exp.NoError(err)
			close(filesChan)
		}
		wg := sync.WaitGroup{}
		// Worker: decode XML files into noms values, write them to the store
		// (unless -no-io), and forward each resulting ref tagged with the
		// file's original index.
		importXml := func() {
			expectedType := util.NewMapOfStringToValue()
			for f := range filesChan {
				file, err := os.Open(f.path)
				d.Exp.NoError(err, "Error getting XML")
				xmlObject, err := mxj.NewMapXmlReader(file)
				d.Exp.NoError(err, "Error decoding XML")
				object := xmlObject.Old()
				file.Close()
				nomsObj := util.NomsValueFromDecodedJSON(object)
				d.Chk.IsType(expectedType, nomsObj)
				r := ref.Ref{}
				if !*noIO {
					r = ds.Store().WriteValue(nomsObj).TargetRef()
				}
				refsChan <- refIndex{r, f.index}
			}
			wg.Done()
		}
		go getFilePaths()
		// Oversubscribe workers relative to CPUs; the work is I/O-bound.
		for i := 0; i < cpuCount*8; i++ {
			wg.Add(1)
			go importXml()
		}
		go func() {
			wg.Wait()
			close(refsChan) // done converting xml to noms
		}()
		// Collect refs and re-sort by original file index so the committed
		// list order is independent of worker scheduling.
		refList := refIndexList{}
		for r := range refsChan {
			refList = append(refList, r)
		}
		sort.Sort(refList)
		refs := make(util.ListOfRefOfMapOfStringToValueDef, len(refList))
		for idx, r := range refList {
			refs[idx] = r.ref
		}
		if !*noIO {
			_, err := ds.Commit(refs.New())
			d.Exp.NoError(err)
		}
		util.MaybeWriteMemProfile()
	})
	if err != nil {
		log.Fatal(err)
	}
}