// fromHTTP panics with an httputil value on failure func (r *WithAttrRequest) fromHTTP(req *http.Request) { r.Signer = blob.ParseOrZero(req.FormValue("signer")) r.Value = req.FormValue("value") fuzzy := req.FormValue("fuzzy") // exact match if empty fuzzyMatch := false if fuzzy != "" { lowered := strings.ToLower(fuzzy) if lowered == "true" || lowered == "t" { fuzzyMatch = true } } r.Attr = req.FormValue("attr") // all attributes if empty if r.Attr == "" { // and force fuzzy in that case. fuzzyMatch = true } r.Fuzzy = fuzzyMatch r.ThumbnailSize = thumbnailSize(req) max := req.FormValue("max") if max != "" { maxR, err := strconv.Atoi(max) if err != nil { panic(httputil.InvalidParameterError("max")) } r.N = maxR } r.N = r.n() }
func (sto *appengineStorage) EnumerateBlobs(dest chan<- blob.SizedRef, after string, limit int, wait time.Duration) error { defer close(dest) ctx := sto.ctx if ctx == nil { loan := ctxPool.Get() defer loan.Return() ctx = loan } prefix := sto.namespace + "|" keyBegin := datastore.NewKey(ctx, memKind, prefix+after, 0, nil) keyEnd := datastore.NewKey(ctx, memKind, sto.namespace+"~", 0, nil) q := datastore.NewQuery(memKind).Limit(int(limit)).Filter("__key__>", keyBegin).Filter("__key__<", keyEnd) it := q.Run(ctx) var row memEnt for { key, err := it.Next(&row) if err == datastore.Done { break } if err != nil { return err } dest <- blob.SizedRef{blob.ParseOrZero(key.StringID()[len(prefix):]), row.Size} } return nil }
func (sto *appengineStorage) EnumerateBlobs(ctx context.Context, dest chan<- blob.SizedRef, after string, limit int) error { defer close(dest) loan := ctxPool.Get() defer loan.Return() actx := loan prefix := sto.namespace + "|" keyBegin := datastore.NewKey(actx, memKind, prefix+after, 0, nil) keyEnd := datastore.NewKey(actx, memKind, sto.namespace+"~", 0, nil) q := datastore.NewQuery(memKind).Limit(int(limit)).Filter("__key__>", keyBegin).Filter("__key__<", keyEnd) it := q.Run(actx) var row memEnt for { key, err := it.Next(&row) if err == datastore.Done { break } if err != nil { return err } select { case dest <- blob.SizedRef{blob.ParseOrZero(key.StringID()[len(prefix):]), uint32(row.Size)}: case <-ctx.Done(): return ctx.Err() } } return nil }
func blobFromURLPath(path string) blob.Ref { matches := kGetPattern.FindStringSubmatch(path) if len(matches) != 3 { return blob.Ref{} } return blob.ParseOrZero(strings.TrimPrefix(matches[0], "/camli/")) }
func NewFlatStatCache(gen string) *FlatStatCache { filename := filepath.Join(osutil.CacheDir(), "camput.statcache."+escapeGen(gen)) fc := &FlatStatCache{ filename: filename, m: make(map[string]fileInfoPutRes), } f, err := os.Open(filename) if os.IsNotExist(err) { return fc } if err != nil { log.Fatalf("opening camput stat cache: %v", filename, err) } defer f.Close() br := bufio.NewReader(f) for { ln, err := br.ReadString('\n') if err == io.EOF { break } if err != nil { log.Printf("Warning: (ignoring) reading stat cache: %v", err) break } ln = strings.TrimSpace(ln) f := strings.Split(ln, "\t") if len(f) < 3 { continue } filename, fp, putres := f[0], statFingerprint(f[1]), f[2] f = strings.Split(putres, "/") if len(f) != 2 { continue } blobrefStr := f[0] blobSize, err := strconv.ParseInt(f[1], 10, 64) if err != nil { continue } fc.m[filename] = fileInfoPutRes{ Fingerprint: fp, Result: client.PutResult{ BlobRef: blob.ParseOrZero(blobrefStr), Size: blobSize, Skipped: true, // is this used? }, } } vlog.Printf("Flatcache read %d entries from %s", len(fc.m), filename) return fc }
// processEncryptedMetaBlob decrypts dat (the data for the br meta blob) and parses // its meta lines, updating the index. // // processEncryptedMetaBlob is not thread-safe. func (s *storage) processEncryptedMetaBlob(br blob.Ref, dat []byte) error { mi := &metaBlobInfo{ br: br, n: len(dat), } log.Printf("processing meta blob %v: %d bytes", br, len(dat)) ivSize := s.block.BlockSize() if len(dat) < ivSize+sha1.Size { return errors.New("data size is smaller than IV + SHA-1") } var ( iv = dat[:ivSize] wantHash = dat[ivSize : ivSize+sha1.Size] enc = dat[ivSize+sha1.Size:] ) plain := bytes.NewBuffer(make([]byte, 0, len(dat))) io.Copy(plain, cipher.StreamReader{ S: cipher.NewCTR(s.block, iv), R: bytes.NewReader(enc), }) s1 := sha1.New() s1.Write(plain.Bytes()) if !bytes.Equal(wantHash, s1.Sum(nil)) { return errors.New("hash of encrypted data doesn't match") } sc := bufio.NewScanner(plain) if !sc.Scan() { return errors.New("No first line") } if sc.Text() != "#camlistore/encmeta=1" { line := sc.Text() if len(line) > 80 { line = line[:80] } return fmt.Errorf("unsupported first line %q", line) } for sc.Scan() { line := sc.Text() slash := strings.Index(line, "/") if slash < 0 { return errors.New("no slash in metaline") } plainBR, meta := line[:slash], line[slash+1:] log.Printf("Adding meta: %q = %q", plainBR, meta) mi.plains = append(mi.plains, blob.ParseOrZero(plainBR)) if err := s.index.Set(plainBR, meta); err != nil { return err } } return sc.Err() }
func PathsOfSignerTarget(t *testing.T, initIdx func() *index.Index) { id := NewIndexDeps(initIdx()) id.Fataler = t pn := id.NewPermanode() t.Logf("uploaded permanode %q", pn) claim1 := id.SetAttribute(pn, "camliPath:somedir", "targ-123") claim2 := id.SetAttribute(pn, "camliPath:with|pipe", "targ-124") t.Logf("made path claims %q and %q", claim1, claim2) id.dumpIndex(t) type test struct { blobref string want int } tests := []test{ {"targ-123", 1}, {"targ-124", 1}, {"targ-125", 0}, } for _, tt := range tests { signer := id.SignerBlobRef paths, err := id.Index.PathsOfSignerTarget(signer, blob.ParseOrZero(tt.blobref)) if err != nil { t.Fatalf("PathsOfSignerTarget(%q): %v", tt.blobref, err) } if len(paths) != tt.want { t.Fatalf("PathsOfSignerTarget(%q) got %d results; want %d", tt.blobref, len(paths), tt.want) } if tt.blobref == "targ-123" { p := paths[0] want := fmt.Sprintf( "Path{Claim: %s, 2011-11-28T01:32:37.000123456Z; Base: %s + Suffix \"somedir\" => Target targ-123}", claim1, pn) if g := p.String(); g != want { t.Errorf("claim wrong.\n got: %s\nwant: %s", g, want) } } } path, err := id.Index.PathLookup(id.SignerBlobRef, pn, "with|pipe", time.Now()) if err != nil { t.Fatalf("PathLookup = %v", err) } if g, e := path.Target.String(), "targ-124"; g != e { t.Errorf("PathLookup = %q; want %q", g, e) } }
// parent returns the base path and the blobRef of pr.subject's parent. // It returns an error if pr.subject or pr.subjectBasePath were not set // properly (with findSubject), or if the parent was not found. func (pr *publishRequest) parent() (parentPath string, parentBlobRef blob.Ref, err error) { if !pr.subject.Valid() { return "", blob.Ref{}, errors.New("subject not set") } if pr.subjectBasePath == "" { return "", blob.Ref{}, errors.New("subjectBasePath not set") } // TODO(mpl): this fails when the parent is the root. fix it. hops := publishedPath(pr.subjectBasePath).splitHops() if len(hops) == 0 { return "", blob.Ref{}, errors.New("No subresource digest in subjectBasePath") } subjectDigest := hops[len(hops)-1] if subjectDigest != pr.subject.DigestPrefix(digestLen) { return "", blob.Ref{}, errors.New("subject digest not in subjectBasePath") } parentPath = strings.TrimSuffix(pr.subjectBasePath, "/"+digestPrefix+subjectDigest) if len(hops) == 1 { // the parent is the suffix, not one of the subresource hops for br, _ := range pr.inSubjectChain { if br != pr.subject.String() { parentBlobRef = blob.ParseOrZero(br) break } } } else { // nested collection(s) parentDigest := hops[len(hops)-2] for br, _ := range pr.inSubjectChain { bref, ok := blob.Parse(br) if !ok { return "", blob.Ref{}, fmt.Errorf("Could not parse %q as blobRef", br) } if bref.DigestPrefix(10) == parentDigest { parentBlobRef = bref break } } } if !parentBlobRef.Valid() { return "", blob.Ref{}, fmt.Errorf("No parent found for %v", pr.subjectBasePath) } return parentPath, parentBlobRef, nil }
// NewFromShareRoot uses shareBlobURL to set up and return a client that // will be used to fetch shared blobs. func NewFromShareRoot(shareBlobURL string, opts ...ClientOption) (c *Client, target blob.Ref, err error) { var root string m := shareURLRx.FindStringSubmatch(shareBlobURL) if m == nil { return nil, blob.Ref{}, fmt.Errorf("Unkown share URL base") } c = New(m[1]) c.discoOnce.Do(func() { /* nothing */ }) c.prefixOnce.Do(func() { /* nothing */ }) c.prefixv = m[1] c.isSharePrefix = true c.authMode = auth.None{} c.via = make(map[string]string) root = m[2] for _, v := range opts { v.modifyClient(c) } c.SetHTTPClient(&http.Client{Transport: c.TransportForConfig(nil)}) req := c.newRequest("GET", shareBlobURL, nil) res, err := c.doReqGated(req) if err != nil { return nil, blob.Ref{}, fmt.Errorf("Error fetching %s: %v", shareBlobURL, err) } defer res.Body.Close() b, err := schema.BlobFromReader(blob.ParseOrZero(root), res.Body) if err != nil { return nil, blob.Ref{}, fmt.Errorf("Error parsing JSON from %s: %v", shareBlobURL, err) } if b.ShareAuthType() != schema.ShareHaveRef { return nil, blob.Ref{}, fmt.Errorf("Unknown share authType of %q", b.ShareAuthType()) } target = b.ShareTarget() if !target.Valid() { return nil, blob.Ref{}, fmt.Errorf("No target.") } c.via[target.String()] = root return c, target, nil }
func (c *Corpus) pnCamliContent(pn blob.Ref) (cc blob.Ref, t time.Time, ok bool) { // TODO(bradfitz): keep this property cached pm, ok := c.permanodes[pn] if !ok { return } for _, cl := range pm.Claims { if cl.Attr != "camliContent" { continue } // TODO: pass down the 'PermanodeConstraint.At' parameter, and then do: if cl.Date.After(at) { continue } switch cl.Type { case string(schema.DelAttributeClaim): cc = blob.Ref{} t = time.Time{} case string(schema.SetAttributeClaim): cc = blob.ParseOrZero(cl.Value) t = cl.Date } } return cc, t, cc.Valid() }
// NewFromShareRoot uses shareBlobURL to set up and return a client that // will be used to fetch shared blobs. func NewFromShareRoot(shareBlobURL string, opts ...ClientOption) (c *Client, target blob.Ref, err error) { var root string m := shareURLRx.FindStringSubmatch(shareBlobURL) if m == nil { return nil, blob.Ref{}, fmt.Errorf("Unkown share URL base") } c = New(m[1], opts...) c.discoOnce.Do(noop) c.prefixOnce.Do(noop) c.prefixv = m[1] c.isSharePrefix = true c.authMode = auth.None{} c.via = make(map[string]string) root = m[2] req := c.newRequest("GET", shareBlobURL, nil) res, err := c.expect2XX(req) if err != nil { return nil, blob.Ref{}, fmt.Errorf("error fetching %s: %v", shareBlobURL, err) } defer res.Body.Close() var buf bytes.Buffer b, err := schema.BlobFromReader(blob.ParseOrZero(root), io.TeeReader(res.Body, &buf)) if err != nil { return nil, blob.Ref{}, fmt.Errorf("error parsing JSON from %s: %v , with response: %q", shareBlobURL, err, buf.Bytes()) } if b.ShareAuthType() != schema.ShareHaveRef { return nil, blob.Ref{}, fmt.Errorf("unknown share authType of %q", b.ShareAuthType()) } target = b.ShareTarget() if !target.Valid() { return nil, blob.Ref{}, fmt.Errorf("no target.") } c.via[target.String()] = root return c, target, nil }
// Index exercises a freshly initialized index end to end: it uploads
// permanodes, attribute claims, two image files, and a directory, then
// asserts both the raw index rows (signerkeyid, imagesize, filetimes,
// have, meta, recpn, edgeback) and the query methods built on them
// (PermanodeOfSignerAttrValue, SearchPermanodesWithAttr,
// GetRecentPermanodes, GetDirMembers, GetBlobMIMEType, GetOwnerClaims).
func Index(t *testing.T, initIdx func() *index.Index) {
	id := NewIndexDeps(initIdx())
	id.Fataler = t
	pn := id.NewPermanode()
	t.Logf("uploaded permanode %q", pn)
	br1 := id.SetAttribute(pn, "tag", "foo1")
	br1Time := id.lastTime()
	t.Logf("set attribute %q", br1)
	br2 := id.SetAttribute(pn, "tag", "foo2")
	br2Time := id.lastTime()
	t.Logf("set attribute %q", br2)
	rootClaim := id.SetAttribute(pn, "camliRoot", "rootval")
	rootClaimTime := id.lastTime()
	t.Logf("set attribute %q", rootClaim)
	// A second permanode, linked to pn below via camliMember.
	pnChild := id.NewPermanode()
	br3 := id.SetAttribute(pnChild, "tag", "bar")
	br3Time := id.lastTime()
	t.Logf("set attribute %q", br3)
	memberRef := id.AddAttribute(pn, "camliMember", pnChild.String())
	t.Logf("add-attribute claim %q points to member permanode %q", memberRef, pnChild)
	memberRefTime := id.lastTime()
	// TODO(bradfitz): add EXIF tests here, once that stuff is ready.
	if false {
		camliRootPath, err := osutil.GoPackagePath("camlistore.org")
		if err != nil {
			t.Fatal("Package camlistore.org no found in $GOPATH or $GOPATH not defined")
		}
		for i := 1; i <= 8; i++ {
			fileBase := fmt.Sprintf("f%d-exif.jpg", i)
			fileName := filepath.Join(camliRootPath, "pkg", "images", "testdata", fileBase)
			contents, err := ioutil.ReadFile(fileName)
			if err != nil {
				t.Fatal(err)
			}
			id.UploadFile(fileBase, string(contents), noTime)
		}
	}
	// Upload a basic image.
	var jpegFileRef blob.Ref
	var exifFileRef blob.Ref
	{
		camliRootPath, err := osutil.GoPackagePath("camlistore.org")
		if err != nil {
			t.Fatal("Package camlistore.org no found in $GOPATH or $GOPATH not defined")
		}
		uploadFile := func(file string, modTime time.Time) blob.Ref {
			fileName := filepath.Join(camliRootPath, "pkg", "index", "indextest", "testdata", file)
			contents, err := ioutil.ReadFile(fileName)
			if err != nil {
				t.Fatal(err)
			}
			br, _ := id.UploadFile(file, string(contents), modTime)
			return br
		}
		jpegFileRef = uploadFile("dude.jpg", noTime)
		exifFileRef = uploadFile("dude-exif.jpg", time.Unix(1361248796, 0))
	}
	// Upload the dir containing the two previous images
	imagesDirRef := id.UploadDir(
		"testdata",
		[]blob.Ref{jpegFileRef, exifFileRef},
		time.Now(),
	)
	lastPermanodeMutation := id.lastTime()
	id.dumpIndex(t)
	// Raw index-row checks: each "key" below is a literal row key whose
	// stored value is asserted directly via id.Get.
	key := "signerkeyid:sha1-ad87ca5c78bd0ce1195c46f7c98e6025abbaf007"
	if g, e := id.Get(key), "2931A67C26F5ABDA"; g != e {
		t.Fatalf("%q = %q, want %q", key, g, e)
	}
	key = "imagesize|" + jpegFileRef.String()
	if g, e := id.Get(key), "50|100"; g != e {
		t.Errorf("JPEG dude.jpg key %q = %q; want %q", key, g, e)
	}
	// dude.jpg has no EXIF date, so its filetimes row is empty.
	key = "filetimes|" + jpegFileRef.String()
	if g, e := id.Get(key), ""; g != e {
		t.Errorf("JPEG dude.jpg key %q = %q; want %q", key, g, e)
	}
	key = "filetimes|" + exifFileRef.String()
	if g, e := id.Get(key), "2013-02-18T01%3A11%3A20Z%2C2013-02-19T04%3A39%3A56Z"; g != e {
		t.Errorf("EXIF dude-exif.jpg key %q = %q; want %q", key, g, e)
	}
	key = "have:" + pn.String()
	pnSizeStr := id.Get(key)
	if pnSizeStr == "" {
		t.Fatalf("missing key %q", key)
	}
	key = "meta:" + pn.String()
	if g, e := id.Get(key), pnSizeStr+"|application/json; camliType=permanode"; g != e {
		t.Errorf("key %q = %q, want %q", key, g, e)
	}
	// NOTE(review): "rt7988-88-71T98:67:62..." appears to be a
	// reversed-digit encoding of the claim time — confirm against the
	// index's "recpn" (recent permanode) row schema.
	key = "recpn|2931A67C26F5ABDA|rt7988-88-71T98:67:62.999876543Z|" + br1.String()
	if g, e := id.Get(key), pn.String(); g != e {
		t.Fatalf("%q = %q, want %q (permanode)", key, g, e)
	}
	key = "recpn|2931A67C26F5ABDA|rt7988-88-71T98:67:61.999876543Z|" + br2.String()
	if g, e := id.Get(key), pn.String(); g != e {
		t.Fatalf("%q = %q, want %q (permanode)", key, g, e)
	}
	key = fmt.Sprintf("edgeback|%s|%s|%s", pnChild, pn, memberRef)
	if g, e := id.Get(key), "permanode|"; g != e {
		t.Fatalf("edgeback row %q = %q, want %q", key, g, e)
	}
	// PermanodeOfSignerAttrValue
	{
		gotPN, err := id.Index.PermanodeOfSignerAttrValue(id.SignerBlobRef, "camliRoot", "rootval")
		if err != nil {
			t.Fatalf("id.Index.PermanodeOfSignerAttrValue = %v", err)
		}
		if gotPN.String() != pn.String() {
			t.Errorf("id.Index.PermanodeOfSignerAttrValue = %q, want %q", gotPN, pn)
		}
		_, err = id.Index.PermanodeOfSignerAttrValue(id.SignerBlobRef, "camliRoot", "MISSING")
		if err == nil {
			t.Errorf("expected an error from PermanodeOfSignerAttrValue on missing value")
		}
	}
	// SearchPermanodesWithAttr - match attr type "tag" and value "foo1"
	{
		ch := make(chan blob.Ref, 10)
		req := &search.PermanodeByAttrRequest{
			Signer:    id.SignerBlobRef,
			Attribute: "tag",
			Query:     "foo1"}
		err := id.Index.SearchPermanodesWithAttr(ch, req)
		if err != nil {
			t.Fatalf("SearchPermanodesWithAttr = %v", err)
		}
		var got []blob.Ref
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{pn}
		if len(got) < 1 || got[0].String() != want[0].String() {
			t.Errorf("id.Index.SearchPermanodesWithAttr gives %q, want %q", got, want)
		}
	}
	// SearchPermanodesWithAttr - match all with attr type "tag"
	{
		ch := make(chan blob.Ref, 10)
		req := &search.PermanodeByAttrRequest{
			Signer:    id.SignerBlobRef,
			Attribute: "tag"}
		err := id.Index.SearchPermanodesWithAttr(ch, req)
		if err != nil {
			t.Fatalf("SearchPermanodesWithAttr = %v", err)
		}
		var got []blob.Ref
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{pn, pnChild}
		if len(got) != len(want) {
			t.Errorf("SearchPermanodesWithAttr results differ.\n got: %q\nwant: %q", got, want)
		}
		// Order-insensitive membership check.
		for _, w := range want {
			found := false
			for _, g := range got {
				if g.String() == w.String() {
					found = true
					break
				}
			}
			if !found {
				t.Errorf("SearchPermanodesWithAttr: %v was not found.\n", w)
			}
		}
	}
	// GetRecentPermanodes
	{
		ch := make(chan *search.Result, 10)
		// expect 2 results, but maybe more if buggy.
		err := id.Index.GetRecentPermanodes(ch, id.SignerBlobRef, 50)
		if err != nil {
			t.Fatalf("GetRecentPermanodes = %v", err)
		}
		got := []*search.Result{}
		for r := range ch {
			got = append(got, r)
		}
		want := []*search.Result{
			&search.Result{
				BlobRef:     pn,
				Signer:      id.SignerBlobRef,
				LastModTime: lastPermanodeMutation.Unix(),
			},
			&search.Result{
				BlobRef:     pnChild,
				Signer:      id.SignerBlobRef,
				LastModTime: br3Time.Unix(),
			},
		}
		if len(got) != len(want) {
			t.Errorf("GetRecentPermanode results differ.\n got: %v\nwant: %v", search.Results(got), search.Results(want))
		}
		for _, w := range want {
			found := false
			for _, g := range got {
				if reflect.DeepEqual(g, w) {
					found = true
					break
				}
			}
			if !found {
				t.Errorf("GetRecentPermanode: %v was not found.\n got: %v\nwant: %v", w, search.Results(got), search.Results(want))
			}
		}
	}
	// GetDirMembers
	{
		ch := make(chan blob.Ref, 10)
		// expect 2 results
		err := id.Index.GetDirMembers(imagesDirRef, ch, 50)
		if err != nil {
			t.Fatalf("GetDirMembers = %v", err)
		}
		got := []blob.Ref{}
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{jpegFileRef, exifFileRef}
		if len(got) != len(want) {
			t.Errorf("GetDirMembers results differ.\n got: %v\nwant: %v", got, want)
		}
		for _, w := range want {
			found := false
			for _, g := range got {
				if w.String() == g.String() {
					found = true
					break
				}
			}
			if !found {
				t.Errorf("GetDirMembers: %v was not found.", w)
			}
		}
	}
	// GetBlobMIMEType
	{
		mime, size, err := id.Index.GetBlobMIMEType(pn)
		if err != nil {
			t.Errorf("GetBlobMIMEType(%q) = %v", pn, err)
		} else {
			if e := "application/json; camliType=permanode"; mime != e {
				t.Errorf("GetBlobMIMEType(%q) mime = %q, want %q", pn, mime, e)
			}
			if size == 0 {
				t.Errorf("GetBlobMIMEType(%q) size is zero", pn)
			}
		}
		// An unknown blobref must report os.ErrNotExist.
		_, _, err = id.Index.GetBlobMIMEType(blob.ParseOrZero("abc-123"))
		if err != os.ErrNotExist {
			t.Errorf("GetBlobMIMEType(dummy blobref) = %v; want os.ErrNotExist", err)
		}
	}
	// GetOwnerClaims
	{
		claims, err := id.Index.GetOwnerClaims(pn, id.SignerBlobRef)
		if err != nil {
			t.Errorf("GetOwnerClaims = %v", err)
		} else {
			// All four claims made on pn above, in order.
			want := search.ClaimList([]*search.Claim{
				&search.Claim{
					BlobRef:   br1,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      br1Time.UTC(),
					Type:      "set-attribute",
					Attr:      "tag",
					Value:     "foo1",
				},
				&search.Claim{
					BlobRef:   br2,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      br2Time.UTC(),
					Type:      "set-attribute",
					Attr:      "tag",
					Value:     "foo2",
				},
				&search.Claim{
					BlobRef:   rootClaim,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      rootClaimTime.UTC(),
					Type:      "set-attribute",
					Attr:      "camliRoot",
					Value:     "rootval",
				},
				&search.Claim{
					BlobRef:   memberRef,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      memberRefTime.UTC(),
					Type:      "add-attribute",
					Attr:      "camliMember",
					Value:     pnChild.String(),
				},
			})
			if !reflect.DeepEqual(claims, want) {
				t.Errorf("GetOwnerClaims results differ.\n got: %v\nwant: %v", claims, want)
			}
		}
	}
}
// populate hits the blobstore to populate map of child nodes.
//
// It describes n's permanode, walks its "camliPath:<name>" attributes,
// and rebuilds n.children, classifying each child as a symlink,
// directory, or regular file. Children no longer referenced by a
// camliPath attribute are removed (if eligible). Describe errors are
// logged and swallowed, leaving the previous children in place.
func (n *mutDir) populate() error {
	n.mu.Lock()
	defer n.mu.Unlock()

	// Only re-populate if we haven't done so recently.
	now := time.Now()
	if n.lastPop.Add(populateInterval).After(now) {
		return nil
	}
	n.lastPop = now

	// Depth 3 so the describe response includes the child permanodes
	// and their camliContent file blobs in one round trip.
	res, err := n.fs.client.Describe(&search.DescribeRequest{
		BlobRef: n.permanode,
		Depth:   3,
	})
	if err != nil {
		log.Println("mutDir.paths:", err)
		return nil
	}
	db := res.Meta[n.permanode.String()]
	if db == nil {
		return errors.New("dir blobref not described")
	}

	// Find all child permanodes and stick them in n.children
	if n.children == nil {
		n.children = make(map[string]mutFileOrDir)
	}
	// Names seen in this pass; used below to prune stale entries.
	currentChildren := map[string]bool{}
	for k, v := range db.Permanode.Attr {
		const p = "camliPath:"
		if !strings.HasPrefix(k, p) || len(v) < 1 {
			continue
		}
		name := k[len(p):]
		childRef := v[0]
		child := res.Meta[childRef]
		if child == nil {
			log.Printf("child not described: %v", childRef)
			continue
		}
		if child.Permanode == nil {
			log.Printf("invalid child, not a permanode: %v", childRef)
			continue
		}
		if target := child.Permanode.Attr.Get("camliSymlinkTarget"); target != "" {
			// This is a symlink.
			n.maybeAddChild(name, child.Permanode, &mutFile{
				fs:        n.fs,
				permanode: blob.ParseOrZero(childRef),
				parent:    n,
				name:      name,
				symLink:   true,
				target:    target,
			})
		} else if isDir(child.Permanode) {
			// This is a directory.
			n.maybeAddChild(name, child.Permanode, &mutDir{
				fs:        n.fs,
				permanode: blob.ParseOrZero(childRef),
				parent:    n,
				name:      name,
			})
		} else if contentRef := child.Permanode.Attr.Get("camliContent"); contentRef != "" {
			// This is a file.
			content := res.Meta[contentRef]
			if content == nil {
				log.Printf("child content not described: %v", childRef)
				continue
			}
			if content.CamliType != "file" {
				log.Printf("child not a file: %v", childRef)
				continue
			}
			if content.File == nil {
				log.Printf("camlitype \"file\" child %v has no described File member", childRef)
				continue
			}
			n.maybeAddChild(name, child.Permanode, &mutFile{
				fs:        n.fs,
				permanode: blob.ParseOrZero(childRef),
				parent:    n,
				name:      name,
				content:   blob.ParseOrZero(contentRef),
				size:      content.File.Size,
			})
		} else {
			// unhandled type...
			continue
		}
		currentChildren[name] = true
	}
	// Remove unreferenced children
	for name, oldchild := range n.children {
		if _, ok := currentChildren[name]; !ok {
			if oldchild.eligibleToDelete() {
				delete(n.children, name)
			}
		}
	}
	return nil
}
// Index exercises a freshly initialized index end to end: it uploads
// permanodes, attribute claims (including a del-attribute), image and
// media files, and a directory, then asserts both the raw index rows
// (signerkeyid, imagesize, filetimes, have, meta, recpn, edgeback,
// mediatag) and the query methods built on them
// (PermanodeOfSignerAttrValue, SearchPermanodesWithAttr,
// GetRecentPermanodes, GetDirMembers, GetBlobMeta, AppendClaims).
func Index(t *testing.T, initIdx func() *index.Index) {
	// Pin the local zone to UTC so time-derived rows are deterministic.
	oldLocal := time.Local
	time.Local = time.UTC
	defer func() { time.Local = oldLocal }()
	id := NewIndexDeps(initIdx())
	id.Fataler = t
	defer id.DumpIndex(t)
	pn := id.NewPermanode()
	t.Logf("uploaded permanode %q", pn)
	br1 := id.SetAttribute(pn, "tag", "foo1")
	br1Time := id.LastTime()
	t.Logf("set attribute %q", br1)
	br2 := id.SetAttribute(pn, "tag", "foo2")
	br2Time := id.LastTime()
	t.Logf("set attribute %q", br2)
	rootClaim := id.SetAttribute(pn, "camliRoot", "rootval")
	rootClaimTime := id.LastTime()
	t.Logf("set attribute %q", rootClaim)
	// A second permanode, linked to pn below via camliMember.
	pnChild := id.NewPermanode()
	br3 := id.SetAttribute(pnChild, "tag", "bar")
	br3Time := id.LastTime()
	t.Logf("set attribute %q", br3)
	memberRef := id.AddAttribute(pn, "camliMember", pnChild.String())
	t.Logf("add-attribute claim %q points to member permanode %q", memberRef, pnChild)
	memberRefTime := id.LastTime()
	// TODO(bradfitz): add EXIF tests here, once that stuff is ready.
	if false {
		camliRootPath, err := osutil.GoPackagePath("camlistore.org")
		if err != nil {
			t.Fatal("Package camlistore.org no found in $GOPATH or $GOPATH not defined")
		}
		for i := 1; i <= 8; i++ {
			fileBase := fmt.Sprintf("f%d-exif.jpg", i)
			fileName := filepath.Join(camliRootPath, "pkg", "images", "testdata", fileBase)
			contents, err := ioutil.ReadFile(fileName)
			if err != nil {
				t.Fatal(err)
			}
			id.UploadFile(fileBase, string(contents), noTime)
		}
	}
	// Upload some files.
	var jpegFileRef, exifFileRef, mediaFileRef, mediaWholeRef blob.Ref
	{
		camliRootPath, err := osutil.GoPackagePath("camlistore.org")
		if err != nil {
			t.Fatal("Package camlistore.org no found in $GOPATH or $GOPATH not defined")
		}
		uploadFile := func(file string, modTime time.Time) (fileRef, wholeRef blob.Ref) {
			fileName := filepath.Join(camliRootPath, "pkg", "index", "indextest", "testdata", file)
			contents, err := ioutil.ReadFile(fileName)
			if err != nil {
				t.Fatal(err)
			}
			fileRef, wholeRef = id.UploadFile(file, string(contents), modTime)
			return
		}
		jpegFileRef, _ = uploadFile("dude.jpg", noTime)
		exifFileRef, _ = uploadFile("dude-exif.jpg", time.Unix(1361248796, 0))
		mediaFileRef, mediaWholeRef = uploadFile("0s.mp3", noTime)
	}
	// Upload the dir containing the previous files.
	imagesDirRef := id.UploadDir(
		"testdata",
		[]blob.Ref{jpegFileRef, exifFileRef, mediaFileRef},
		time.Now(),
	)
	lastPermanodeMutation := id.LastTime()
	// Raw index-row checks: each "key" below is a literal row key whose
	// stored value is asserted directly via id.Get.
	key := "signerkeyid:sha1-ad87ca5c78bd0ce1195c46f7c98e6025abbaf007"
	if g, e := id.Get(key), "2931A67C26F5ABDA"; g != e {
		t.Fatalf("%q = %q, want %q", key, g, e)
	}
	key = "imagesize|" + jpegFileRef.String()
	if g, e := id.Get(key), "50|100"; g != e {
		t.Errorf("JPEG dude.jpg key %q = %q; want %q", key, g, e)
	}
	// dude.jpg has no EXIF date, so its filetimes row is empty.
	key = "filetimes|" + jpegFileRef.String()
	if g, e := id.Get(key), ""; g != e {
		t.Errorf("JPEG dude.jpg key %q = %q; want %q", key, g, e)
	}
	key = "filetimes|" + exifFileRef.String()
	if g, e := id.Get(key), "2013-02-18T01%3A11%3A20Z%2C2013-02-19T04%3A39%3A56Z"; g != e {
		t.Errorf("EXIF dude-exif.jpg key %q = %q; want %q", key, g, e)
	}
	key = "have:" + pn.String()
	pnSizeStr := strings.TrimSuffix(id.Get(key), "|indexed")
	if pnSizeStr == "" {
		t.Fatalf("missing key %q", key)
	}
	key = "meta:" + pn.String()
	if g, e := id.Get(key), pnSizeStr+"|application/json; camliType=permanode"; g != e {
		t.Errorf("key %q = %q, want %q", key, g, e)
	}
	// NOTE(review): "rt7988-88-71T98:67:62..." appears to be a
	// reversed-digit encoding of the claim time — confirm against the
	// index's "recpn" (recent permanode) row schema.
	key = "recpn|2931A67C26F5ABDA|rt7988-88-71T98:67:62.999876543Z|" + br1.String()
	if g, e := id.Get(key), pn.String(); g != e {
		t.Fatalf("%q = %q, want %q (permanode)", key, g, e)
	}
	key = "recpn|2931A67C26F5ABDA|rt7988-88-71T98:67:61.999876543Z|" + br2.String()
	if g, e := id.Get(key), pn.String(); g != e {
		t.Fatalf("%q = %q, want %q (permanode)", key, g, e)
	}
	key = fmt.Sprintf("edgeback|%s|%s|%s", pnChild, pn, memberRef)
	if g, e := id.Get(key), "permanode|"; g != e {
		t.Fatalf("edgeback row %q = %q, want %q", key, g, e)
	}
	// ID3/media rows extracted from 0s.mp3, keyed by its wholeRef.
	mediaTests := []struct {
		prop, exp string
	}{
		{"title", "Zero Seconds"},
		{"artist", "Test Artist"},
		{"album", "Test Album"},
		{"genre", "(20)Alternative"},
		{"musicbrainzalbumid", "00000000-0000-0000-0000-000000000000"},
		{"year", "1992"},
		{"track", "1"},
		{"disc", "2"},
		{"mediaref", "sha1-fefac74a1d5928316d7131747107c8a61b71ffe4"},
		{"durationms", "26"},
	}
	for _, tt := range mediaTests {
		key = fmt.Sprintf("mediatag|%s|%s", mediaWholeRef.String(), tt.prop)
		if g, _ := url.QueryUnescape(id.Get(key)); g != tt.exp {
			t.Errorf("0s.mp3 key %q = %q; want %q", key, g, tt.exp)
		}
	}
	// PermanodeOfSignerAttrValue
	{
		gotPN, err := id.Index.PermanodeOfSignerAttrValue(id.SignerBlobRef, "camliRoot", "rootval")
		if err != nil {
			t.Fatalf("id.Index.PermanodeOfSignerAttrValue = %v", err)
		}
		if gotPN.String() != pn.String() {
			t.Errorf("id.Index.PermanodeOfSignerAttrValue = %q, want %q", gotPN, pn)
		}
		_, err = id.Index.PermanodeOfSignerAttrValue(id.SignerBlobRef, "camliRoot", "MISSING")
		if err == nil {
			t.Errorf("expected an error from PermanodeOfSignerAttrValue on missing value")
		}
	}
	// SearchPermanodesWithAttr - match attr type "tag" and value "foo1"
	{
		ch := make(chan blob.Ref, 10)
		req := &camtypes.PermanodeByAttrRequest{
			Signer:    id.SignerBlobRef,
			Attribute: "tag",
			Query:     "foo1",
		}
		err := id.Index.SearchPermanodesWithAttr(ch, req)
		if err != nil {
			t.Fatalf("SearchPermanodesWithAttr = %v", err)
		}
		var got []blob.Ref
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{pn}
		if len(got) < 1 || got[0].String() != want[0].String() {
			t.Errorf("id.Index.SearchPermanodesWithAttr gives %q, want %q", got, want)
		}
	}
	// SearchPermanodesWithAttr - match all with attr type "tag"
	{
		ch := make(chan blob.Ref, 10)
		req := &camtypes.PermanodeByAttrRequest{
			Signer:    id.SignerBlobRef,
			Attribute: "tag",
		}
		err := id.Index.SearchPermanodesWithAttr(ch, req)
		if err != nil {
			t.Fatalf("SearchPermanodesWithAttr = %v", err)
		}
		var got []blob.Ref
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{pn, pnChild}
		if len(got) != len(want) {
			t.Errorf("SearchPermanodesWithAttr results differ.\n got: %q\nwant: %q", got, want)
		}
		// Order-insensitive membership check.
		for _, w := range want {
			found := false
			for _, g := range got {
				if g.String() == w.String() {
					found = true
					break
				}
			}
			if !found {
				t.Errorf("SearchPermanodesWithAttr: %v was not found.\n", w)
			}
		}
	}
	// Delete value "pony" of type "title" (which does not actually exist) for pn
	br4 := id.DelAttribute(pn, "title", "pony")
	br4Time := id.LastTime()
	// and verify it is not found when searching by attr
	{
		ch := make(chan blob.Ref, 10)
		req := &camtypes.PermanodeByAttrRequest{
			Signer:    id.SignerBlobRef,
			Attribute: "title",
			Query:     "pony",
		}
		err := id.Index.SearchPermanodesWithAttr(ch, req)
		if err != nil {
			t.Fatalf("SearchPermanodesWithAttr = %v", err)
		}
		var got []blob.Ref
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{}
		if len(got) != len(want) {
			t.Errorf("SearchPermanodesWithAttr results differ.\n got: %q\nwant: %q", got, want)
		}
	}
	// GetRecentPermanodes
	{
		// verify runs one GetRecentPermanodes query bounded by "before"
		// and checks the results against want, order-insensitively.
		verify := func(prefix string, want []camtypes.RecentPermanode, before time.Time) {
			ch := make(chan camtypes.RecentPermanode, 10)
			// expect 2 results, but maybe more if buggy.
			err := id.Index.GetRecentPermanodes(ch, id.SignerBlobRef, 50, before)
			if err != nil {
				t.Fatalf("[%s] GetRecentPermanodes = %v", prefix, err)
			}
			got := []camtypes.RecentPermanode{}
			for r := range ch {
				got = append(got, r)
			}
			if len(got) != len(want) {
				t.Errorf("[%s] GetRecentPermanode results differ.\n got: %v\nwant: %v", prefix, searchResults(got), searchResults(want))
			}
			for _, w := range want {
				found := false
				for _, g := range got {
					if g.Equal(w) {
						found = true
						break
					}
				}
				if !found {
					t.Errorf("[%s] GetRecentPermanode: %v was not found.\n got: %v\nwant: %v", prefix, w, searchResults(got), searchResults(want))
				}
			}
		}
		want := []camtypes.RecentPermanode{
			{
				Permanode:   pn,
				Signer:      id.SignerBlobRef,
				LastModTime: br4Time,
			},
			{
				Permanode:   pnChild,
				Signer:      id.SignerBlobRef,
				LastModTime: br3Time,
			},
		}
		before := time.Time{}
		verify("Zero before", want, before)
		before = lastPermanodeMutation
		t.Log("lastPermanodeMutation", lastPermanodeMutation, lastPermanodeMutation.Unix())
		verify("Non-zero before", want[1:], before)
	}
	// GetDirMembers
	{
		ch := make(chan blob.Ref, 10)
		// expect 2 results
		err := id.Index.GetDirMembers(imagesDirRef, ch, 50)
		if err != nil {
			t.Fatalf("GetDirMembers = %v", err)
		}
		got := []blob.Ref{}
		for r := range ch {
			got = append(got, r)
		}
		want := []blob.Ref{jpegFileRef, exifFileRef, mediaFileRef}
		if len(got) != len(want) {
			t.Errorf("GetDirMembers results differ.\n got: %v\nwant: %v", got, want)
		}
		for _, w := range want {
			found := false
			for _, g := range got {
				if w == g {
					found = true
					break
				}
			}
			if !found {
				t.Errorf("GetDirMembers: %v was not found.", w)
			}
		}
	}
	// GetBlobMeta
	{
		meta, err := id.Index.GetBlobMeta(pn)
		if err != nil {
			t.Errorf("GetBlobMeta(%q) = %v", pn, err)
		} else {
			if e := "permanode"; meta.CamliType != e {
				t.Errorf("GetBlobMeta(%q) mime = %q, want %q", pn, meta.CamliType, e)
			}
			if meta.Size == 0 {
				t.Errorf("GetBlobMeta(%q) size is zero", pn)
			}
		}
		// An unknown blobref must report os.ErrNotExist.
		_, err = id.Index.GetBlobMeta(blob.ParseOrZero("abc-123"))
		if err != os.ErrNotExist {
			t.Errorf("GetBlobMeta(dummy blobref) = %v; want os.ErrNotExist", err)
		}
	}
	// AppendClaims
	{
		claims, err := id.Index.AppendClaims(nil, pn, id.SignerBlobRef, "")
		if err != nil {
			t.Errorf("AppendClaims = %v", err)
		} else {
			// All five claims made on pn above, in order, including the
			// del-attribute of "title".
			want := []camtypes.Claim{
				{
					BlobRef:   br1,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      br1Time.UTC(),
					Type:      "set-attribute",
					Attr:      "tag",
					Value:     "foo1",
				},
				{
					BlobRef:   br2,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      br2Time.UTC(),
					Type:      "set-attribute",
					Attr:      "tag",
					Value:     "foo2",
				},
				{
					BlobRef:   rootClaim,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      rootClaimTime.UTC(),
					Type:      "set-attribute",
					Attr:      "camliRoot",
					Value:     "rootval",
				},
				{
					BlobRef:   memberRef,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      memberRefTime.UTC(),
					Type:      "add-attribute",
					Attr:      "camliMember",
					Value:     pnChild.String(),
				},
				{
					BlobRef:   br4,
					Permanode: pn,
					Signer:    id.SignerBlobRef,
					Date:      br4Time.UTC(),
					Type:      "del-attribute",
					Attr:      "title",
					Value:     "pony",
				},
			}
			if !reflect.DeepEqual(claims, want) {
				t.Errorf("AppendClaims results differ.\n got: %v\nwant: %v", claims, want)
			}
		}
	}
}
// PathsOfSignerTarget exercises Index.PathsOfSignerTarget and
// Index.PathsLookup against a freshly built index: it creates one
// permanode carrying two camliPath claims, then verifies lookups both by
// target blobref and by (signer, base, suffix), including behavior
// across a delete claim and a delete-of-the-delete (undelete).
func PathsOfSignerTarget(t *testing.T, initIdx func() *index.Index) {
	id := NewIndexDeps(initIdx())
	id.Fataler = t
	defer id.DumpIndex(t)
	signer := id.SignerBlobRef
	pn := id.NewPermanode()
	t.Logf("uploaded permanode %q", pn)
	// Two path claims on the same permanode; the second suffix contains
	// a '|' to exercise handling of the index's key separator character.
	claim1 := id.SetAttribute(pn, "camliPath:somedir", "targ-123")
	claim1Time := id.LastTime().UTC()
	claim2 := id.SetAttribute(pn, "camliPath:with|pipe", "targ-124")
	claim2Time := id.LastTime().UTC()
	t.Logf("made path claims %q and %q", claim1, claim2)

	type test struct {
		blobref string // target blobref or path suffix to look up
		want    int    // expected number of results
	}

	// Look up paths by target blobref.
	tests := []test{
		{"targ-123", 1},
		{"targ-124", 1},
		{"targ-125", 0},
	}
	for _, tt := range tests {
		paths, err := id.Index.PathsOfSignerTarget(signer, blob.ParseOrZero(tt.blobref))
		if err != nil {
			t.Fatalf("PathsOfSignerTarget(%q): %v", tt.blobref, err)
		}
		if len(paths) != tt.want {
			t.Fatalf("PathsOfSignerTarget(%q) got %d results; want %d", tt.blobref, len(paths), tt.want)
		}
		// Spot-check the full stringified Path for one entry.
		if tt.blobref == "targ-123" {
			p := paths[0]
			want := fmt.Sprintf(
				"Path{Claim: %s, %v; Base: %s + Suffix \"somedir\" => Target targ-123}",
				claim1, claim1Time, pn)
			if g := p.String(); g != want {
				t.Errorf("claim wrong.\n got: %s\nwant: %s", g, want)
			}
		}
	}

	// Look up paths by (signer, base permanode, suffix).
	tests = []test{
		{"somedir", 1},
		{"with|pipe", 1},
		{"void", 0},
	}
	for _, tt := range tests {
		paths, err := id.Index.PathsLookup(id.SignerBlobRef, pn, tt.blobref)
		if err != nil {
			t.Fatalf("PathsLookup(%q): %v", tt.blobref, err)
		}
		if len(paths) != tt.want {
			t.Fatalf("PathsLookup(%q) got %d results; want %d", tt.blobref, len(paths), tt.want)
		}
		if tt.blobref == "with|pipe" {
			p := paths[0]
			want := fmt.Sprintf(
				"Path{Claim: %s, %s; Base: %s + Suffix \"with|pipe\" => Target targ-124}",
				claim2, claim2Time, pn)
			if g := p.String(); g != want {
				t.Errorf("claim wrong.\n got: %s\nwant: %s", g, want)
			}
		}
	}

	// now test deletions
	// Delete an existing value
	claim3 := id.Delete(claim2)
	t.Logf("claim %q deletes path claim %q", claim3, claim2)
	tests = []test{
		{"targ-123", 1},
		{"targ-124", 0}, // removed by claim3
		{"targ-125", 0},
	}
	for _, tt := range tests {
		signer := id.SignerBlobRef
		paths, err := id.Index.PathsOfSignerTarget(signer, blob.ParseOrZero(tt.blobref))
		if err != nil {
			t.Fatalf("PathsOfSignerTarget(%q): %v", tt.blobref, err)
		}
		if len(paths) != tt.want {
			t.Fatalf("PathsOfSignerTarget(%q) got %d results; want %d", tt.blobref, len(paths), tt.want)
		}
	}
	tests = []test{
		{"somedir", 1},
		{"with|pipe", 0}, // removed by claim3
		{"void", 0},
	}
	for _, tt := range tests {
		paths, err := id.Index.PathsLookup(id.SignerBlobRef, pn, tt.blobref)
		if err != nil {
			t.Fatalf("PathsLookup(%q): %v", tt.blobref, err)
		}
		if len(paths) != tt.want {
			t.Fatalf("PathsLookup(%q) got %d results; want %d", tt.blobref, len(paths), tt.want)
		}
	}

	// recreate second path, and test if the previous deletion of it
	// is indeed ignored.
	claim4 := id.Delete(claim3)
	t.Logf("delete claim %q deletes claim %q, which should undelete %q", claim4, claim3, claim2)
	tests = []test{
		{"targ-123", 1},
		{"targ-124", 1}, // restored: claim4 deleted the delete-claim claim3
		{"targ-125", 0},
	}
	for _, tt := range tests {
		signer := id.SignerBlobRef
		paths, err := id.Index.PathsOfSignerTarget(signer, blob.ParseOrZero(tt.blobref))
		if err != nil {
			t.Fatalf("PathsOfSignerTarget(%q): %v", tt.blobref, err)
		}
		if len(paths) != tt.want {
			t.Fatalf("PathsOfSignerTarget(%q) got %d results; want %d", tt.blobref, len(paths), tt.want)
		}
		// and check the modtime too
		if tt.blobref == "targ-124" {
			p := paths[0]
			want := fmt.Sprintf(
				"Path{Claim: %s, %v; Base: %s + Suffix \"with|pipe\" => Target targ-124}",
				claim2, claim2Time, pn)
			if g := p.String(); g != want {
				t.Errorf("claim wrong.\n got: %s\nwant: %s", g, want)
			}
		}
	}
	tests = []test{
		{"somedir", 1},
		{"with|pipe", 1},
		{"void", 0},
	}
	for _, tt := range tests {
		paths, err := id.Index.PathsLookup(id.SignerBlobRef, pn, tt.blobref)
		if err != nil {
			t.Fatalf("PathsLookup(%q): %v", tt.blobref, err)
		}
		if len(paths) != tt.want {
			t.Fatalf("PathsLookup(%q) got %d results; want %d", tt.blobref, len(paths), tt.want)
		}
		// and check that modtime is now claim4Time
		// NOTE(review): the comment above contradicts the assertion below,
		// which still expects claim2Time — either the comment is stale or
		// the expected string is; confirm against the index's undelete
		// semantics before changing either.
		if tt.blobref == "with|pipe" {
			p := paths[0]
			want := fmt.Sprintf(
				"Path{Claim: %s, %s; Base: %s + Suffix \"with|pipe\" => Target targ-124}",
				claim2, claim2Time, pn)
			if g := p.String(); g != want {
				t.Errorf("claim wrong.\n got: %s\nwant: %s", g, want)
			}
		}
	}
}
// populate hits the blobstore to populate map of child nodes. func (n *mutDir) populate() error { n.mu.Lock() defer n.mu.Unlock() // Only re-populate if we haven't done so recently. now := time.Now() if n.lastPop.Add(populateInterval).After(now) { return nil } n.lastPop = now res, err := n.fs.client.Describe(&search.DescribeRequest{ BlobRef: n.permanode, Depth: 3, }) if err != nil { log.Println("mutDir.paths:", err) return nil } db := res.Meta[n.permanode.String()] if db == nil { return errors.New("dir blobref not described") } // Find all child permanodes and stick them in n.children if n.children == nil { n.children = make(map[string]mutFileOrDir) } for k, v := range db.Permanode.Attr { const p = "camliPath:" if !strings.HasPrefix(k, p) || len(v) < 1 { continue } name := k[len(p):] childRef := v[0] child := res.Meta[childRef] if child == nil { log.Printf("child not described: %v", childRef) continue } if target := child.Permanode.Attr.Get("camliSymlinkTarget"); target != "" { // This is a symlink. n.children[name] = &mutFile{ fs: n.fs, permanode: blob.ParseOrZero(childRef), parent: n, name: name, symLink: true, target: target, } continue } if contentRef := child.Permanode.Attr.Get("camliContent"); contentRef != "" { // This is a file. content := res.Meta[contentRef] if content == nil { log.Printf("child content not described: %v", childRef) continue } if content.CamliType != "file" { log.Printf("child not a file: %v", childRef) continue } n.children[name] = &mutFile{ fs: n.fs, permanode: blob.ParseOrZero(childRef), parent: n, name: name, content: blob.ParseOrZero(contentRef), size: content.File.Size, } continue } // This is a directory. n.children[name] = &mutDir{ fs: n.fs, permanode: blob.ParseOrZero(childRef), parent: n, name: name, } } return nil }
// populate hits the blobstore to populate map of child nodes. func (n *roDir) populate() error { n.mu.Lock() defer n.mu.Unlock() ctx := context.TODO() // Things never change here, so if we've ever populated, we're // populated. if n.children != nil { return nil } log.Printf("roDir.populate(%q) - Sending request At %v", n.fullPath(), n.at) res, err := n.fs.client.Describe(ctx, &search.DescribeRequest{ BlobRef: n.permanode, Depth: 3, At: types.Time3339(n.at), }) if err != nil { log.Println("roDir.paths:", err) return nil } db := res.Meta[n.permanode.String()] if db == nil { return errors.New("dir blobref not described") } // Find all child permanodes and stick them in n.children n.children = make(map[string]roFileOrDir) for k, v := range db.Permanode.Attr { const p = "camliPath:" if !strings.HasPrefix(k, p) || len(v) < 1 { continue } name := k[len(p):] childRef := v[0] child := res.Meta[childRef] if child == nil { log.Printf("child not described: %v", childRef) continue } if target := child.Permanode.Attr.Get("camliSymlinkTarget"); target != "" { // This is a symlink. n.children[name] = &roFile{ fs: n.fs, permanode: blob.ParseOrZero(childRef), parent: n, name: name, symLink: true, target: target, } } else if isDir(child.Permanode) { // This is a directory. n.children[name] = &roDir{ fs: n.fs, permanode: blob.ParseOrZero(childRef), parent: n, name: name, at: n.at, } } else if contentRef := child.Permanode.Attr.Get("camliContent"); contentRef != "" { // This is a file. content := res.Meta[contentRef] if content == nil { log.Printf("child content not described: %v", childRef) continue } if content.CamliType != "file" { log.Printf("child not a file: %v", childRef) continue } n.children[name] = &roFile{ fs: n.fs, permanode: blob.ParseOrZero(childRef), parent: n, name: name, content: blob.ParseOrZero(contentRef), size: content.File.Size, } } else { // unknown type continue } n.children[name].xattr().load(child.Permanode) } return nil }