func TestQueryPermanodeModtime(t *testing.T) {
	testQuery(t, func(qt *queryTest) {
		id := qt.id

		// indextest advances time one second per operation:
		p1 := id.NewPlannedPermanode("1")
		p2 := id.NewPlannedPermanode("2")
		p3 := id.NewPlannedPermanode("3")
		id.SetAttribute(p1, "someAttr", "value1") // 2011-11-28 01:32:37.000123456 +0000 UTC 1322443957
		id.SetAttribute(p2, "someAttr", "value2") // 2011-11-28 01:32:38.000123456 +0000 UTC 1322443958
		id.SetAttribute(p3, "someAttr", "value3") // 2011-11-28 01:32:39.000123456 +0000 UTC 1322443959

		sq := &SearchQuery{
			Constraint: &Constraint{
				Permanode: &PermanodeConstraint{
					ModTime: &TimeConstraint{
						After:  types.Time3339(time.Unix(1322443957, 456789)),
						Before: types.Time3339(time.Unix(1322443959, 0)),
					},
				},
			},
		}
		qt.wantRes(sq, p2)
	})
}
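// For context: types.Time3339 wraps time.Time so that values marshal to and
// from RFC 3339 strings in JSON, which is what lets TimeConstraint bounds
// travel over the search API. A minimal sketch of the idea only, not the
// actual pkg/types implementation (which also handles zero times, parsing,
// and subsecond precision):
//
//	type Time3339 time.Time
//
//	func (t Time3339) MarshalJSON() ([]byte, error) {
//		return []byte(strconv.Quote(time.Time(t).UTC().Format(time.RFC3339Nano))), nil
//	}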
func (c *desCmd) RunCommand(args []string) error {
	if len(args) == 0 {
		return cmdmain.UsageError("requires blobref")
	}
	var blobs []blob.Ref
	for _, arg := range args {
		br, ok := blob.Parse(arg)
		if !ok {
			return cmdmain.UsageError(fmt.Sprintf("invalid blobref %q", arg))
		}
		blobs = append(blobs, br)
	}
	var at time.Time // TODO: implement. from "2 days ago" "-2d", "-2h", "2013-02-05", etc
	cl := newClient(c.server)
	res, err := cl.Describe(&search.DescribeRequest{
		BlobRefs: blobs,
		Depth:    c.depth,
		At:       types.Time3339(at),
	})
	if err != nil {
		return err
	}
	resj, err := json.MarshalIndent(res, "", "  ")
	if err != nil {
		return err
	}
	resj = append(resj, '\n')
	_, err = os.Stdout.Write(resj)
	return err
}
// GetClaims returns the claims on req.Permanode signed by sh.owner.
func (sh *Handler) GetClaims(req *ClaimsRequest) (*ClaimsResponse, error) {
	if !req.Permanode.Valid() {
		return nil, errors.New("Error getting claims: nil permanode.")
	}
	var claims []camtypes.Claim
	claims, err := sh.index.AppendClaims(claims, req.Permanode, sh.owner, req.AttrFilter)
	if err != nil {
		return nil, fmt.Errorf("Error getting claims of %s: %v", req.Permanode.String(), err)
	}
	sort.Sort(camtypes.ClaimsByDate(claims))
	var jclaims []*ClaimsItem
	for _, claim := range claims {
		jclaim := &ClaimsItem{
			BlobRef:   claim.BlobRef,
			Signer:    claim.Signer,
			Permanode: claim.Permanode,
			Date:      types.Time3339(claim.Date),
			Type:      claim.Type,
			Attr:      claim.Attr,
			Value:     claim.Value,
		}
		jclaims = append(jclaims, jclaim)
	}
	res := &ClaimsResponse{
		Claims: jclaims,
	}
	return res, nil
}
func (r *RecentRequest) URLSuffix() string {
	var buf bytes.Buffer
	fmt.Fprintf(&buf, "camli/search/recent?n=%d", r.n())
	if !r.Before.IsZero() {
		fmt.Fprintf(&buf, "&before=%s", types.Time3339(r.Before))
	}
	return buf.String()
}
func (r *WithAttrRequest) URLSuffix() string {
	s := fmt.Sprintf("camli/search/permanodeattr?signer=%v&value=%v&fuzzy=%v&attr=%v&max=%v",
		r.Signer, url.QueryEscape(r.Value), r.Fuzzy, r.Attr, r.N)
	if !r.At.IsZero() {
		s += fmt.Sprintf("&at=%s", types.Time3339(r.At))
	}
	return s
}
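// Illustrative only: with Before/At set, the two URLSuffix methods above
// build query strings along these lines (the exact timestamp encoding
// depends on Time3339's String method, assumed here to emit RFC 3339):
//
//	camli/search/recent?n=50&before=2013-02-03T00:00:00Z
//	camli/search/permanodeattr?signer=...&value=foo&fuzzy=false&attr=tag&max=50&at=2013-02-03T00:00:00Z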
func (b before) Predicate(ctx context.Context, args []string) (*Constraint, error) {
	t, err := parseTimePrefix(args[0])
	if err != nil {
		return nil, err
	}
	tc := &TimeConstraint{}
	tc.Before = types.Time3339(t)
	c := &Constraint{
		Permanode: &PermanodeConstraint{
			Time: tc,
		},
	}
	return c, nil
}
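// Illustrative only: given the search atom "before:2013-02-03", the
// predicate above would yield a constraint equivalent to the following
// (assuming parseTimePrefix accepts bare dates, as the keyword tests
// further below suggest):
//
//	&Constraint{
//		Permanode: &PermanodeConstraint{
//			Time: &TimeConstraint{
//				Before: types.Time3339(time.Date(2013, 2, 3, 0, 0, 0, 0, time.UTC)),
//			},
//		},
//	}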
// GetRecentPermanodes returns recently-modified permanodes.
func (sh *Handler) GetRecentPermanodes(req *RecentRequest) (*RecentResponse, error) {
	ctx := context.TODO()
	sh.index.RLock()
	defer sh.index.RUnlock()

	ch := make(chan camtypes.RecentPermanode)
	errch := make(chan error, 1)
	before := time.Now()
	if !req.Before.IsZero() {
		before = req.Before
	}
	go func() {
		errch <- sh.index.GetRecentPermanodes(ctx, ch, sh.owner, req.n(), before)
	}()

	dr := sh.NewDescribeRequest()

	var recent []*RecentItem
	for res := range ch {
		dr.StartDescribe(ctx, res.Permanode, 2)
		recent = append(recent, &RecentItem{
			BlobRef: res.Permanode,
			Owner:   res.Signer,
			ModTime: types.Time3339(res.LastModTime),
		})
		testHookBug121() // http://camlistore.org/issue/121
	}

	if err := <-errch; err != nil {
		return nil, err
	}

	metaMap, err := dr.metaMap()
	if err != nil {
		return nil, err
	}

	res := &RecentResponse{
		Recent: recent,
		Meta:   metaMap,
	}
	return res, nil
}
func (rh *RootHandler) serveDiscovery(rw http.ResponseWriter, req *http.Request) {
	d := &camtypes.Discovery{
		BlobRoot:     rh.BlobRoot,
		JSONSignRoot: rh.JSONSignRoot,
		HelpRoot:     rh.helpRoot,
		ImporterRoot: rh.importerRoot,
		SearchRoot:   rh.SearchRoot,
		ShareRoot:    rh.shareRoot,
		StatusRoot:   rh.statusRoot,
		OwnerName:    rh.OwnerName,
		UserName:     rh.Username,
		AuthToken:    auth.DiscoveryToken(),
		ThumbVersion: images.ThumbnailVersion(),
	}
	if gener, ok := rh.Storage.(blobserver.Generationer); ok {
		initTime, gen, err := gener.StorageGeneration()
		if err != nil {
			d.StorageGenerationError = err.Error()
		} else {
			d.StorageInitTime = types.Time3339(initTime)
			d.StorageGeneration = gen
		}
	} else {
		log.Printf("Storage type %T is not a blobserver.Generationer; not sending storageGeneration", rh.Storage)
	}
	if rh.ui != nil {
		d.UIDiscovery = rh.ui.discovery()
	}
	if rh.sigh != nil {
		d.Signing = rh.sigh.Discovery(rh.JSONSignRoot)
	}
	if len(rh.sync) > 0 {
		syncHandlers := make([]camtypes.SyncHandlerDiscovery, 0, len(rh.sync))
		for _, sh := range rh.sync {
			syncHandlers = append(syncHandlers, sh.discovery())
		}
		d.SyncHandlers = syncHandlers
	}
	discoveryHelper(rw, req, d)
}
func (c *desCmd) RunCommand(args []string) error {
	if len(args) == 0 {
		return cmdmain.UsageError("requires blobref")
	}
	var blobs []blob.Ref
	for _, arg := range args {
		br, ok := blob.Parse(arg)
		if !ok {
			return cmdmain.UsageError(fmt.Sprintf("invalid blobref %q", arg))
		}
		blobs = append(blobs, br)
	}
	var at time.Time // TODO: add "2 days ago" "-2d", "-2h", "2013-02-05", etc
	if c.at != "" {
		var err error
		at, err = time.Parse(time.RFC3339, c.at)
		if err != nil {
			return fmt.Errorf("error parsing --at value %q: %v", c.at, err)
		}
	}
	cl := newClient(c.server)
	res, err := cl.Describe(context.Background(), &search.DescribeRequest{
		BlobRefs: blobs,
		Depth:    c.depth,
		At:       types.Time3339(at),
	})
	if err != nil {
		return err
	}
	resj, err := json.MarshalIndent(res, "", "  ")
	if err != nil {
		return err
	}
	resj = append(resj, '\n')
	_, err = os.Stdout.Write(resj)
	return err
}
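// Example invocation of the command above. The --at flag name comes from the
// error message in the code; the "describe" subcommand name is an assumption
// based on desCmd, and the blobref is a placeholder:
//
//	camtool describe --at=2013-02-05T12:00:00Z sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33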
// populate hits the blobstore to populate map of child nodes.
func (n *roDir) populate() error {
	n.mu.Lock()
	defer n.mu.Unlock()
	ctx := context.TODO()

	// Things never change here, so if we've ever populated, we're
	// populated.
	if n.children != nil {
		return nil
	}

	log.Printf("roDir.populate(%q) - Sending request At %v", n.fullPath(), n.at)

	res, err := n.fs.client.Describe(ctx, &search.DescribeRequest{
		BlobRef: n.permanode,
		Depth:   3,
		At:      types.Time3339(n.at),
	})
	if err != nil {
		log.Println("roDir.paths:", err)
		return nil
	}
	db := res.Meta[n.permanode.String()]
	if db == nil {
		return errors.New("dir blobref not described")
	}

	// Find all child permanodes and stick them in n.children
	n.children = make(map[string]roFileOrDir)
	for k, v := range db.Permanode.Attr {
		const p = "camliPath:"
		if !strings.HasPrefix(k, p) || len(v) < 1 {
			continue
		}
		name := k[len(p):]
		childRef := v[0]
		child := res.Meta[childRef]
		if child == nil {
			log.Printf("child not described: %v", childRef)
			continue
		}
		if target := child.Permanode.Attr.Get("camliSymlinkTarget"); target != "" {
			// This is a symlink.
			n.children[name] = &roFile{
				fs:        n.fs,
				permanode: blob.ParseOrZero(childRef),
				parent:    n,
				name:      name,
				symLink:   true,
				target:    target,
			}
		} else if isDir(child.Permanode) {
			// This is a directory.
			n.children[name] = &roDir{
				fs:        n.fs,
				permanode: blob.ParseOrZero(childRef),
				parent:    n,
				name:      name,
				at:        n.at,
			}
		} else if contentRef := child.Permanode.Attr.Get("camliContent"); contentRef != "" {
			// This is a file.
			content := res.Meta[contentRef]
			if content == nil {
				log.Printf("child content not described: %v", childRef)
				continue
			}
			if content.CamliType != "file" {
				log.Printf("child not a file: %v", childRef)
				continue
			}
			n.children[name] = &roFile{
				fs:        n.fs,
				permanode: blob.ParseOrZero(childRef),
				parent:    n,
				name:      name,
				content:   blob.ParseOrZero(contentRef),
				size:      content.File.Size,
			}
		} else {
			// unknown type
			continue
		}
		n.children[name].xattr().load(child.Permanode)
	}
	return nil
}
func TestTimeConstraint(t *testing.T) {
	tests := []struct {
		c    *TimeConstraint
		t    time.Time
		want bool
	}{
		{
			&TimeConstraint{
				Before: types.Time3339(time.Unix(124, 0)),
			},
			time.Unix(123, 0),
			true,
		},
		{
			&TimeConstraint{
				Before: types.Time3339(time.Unix(123, 0)),
			},
			time.Unix(123, 1),
			false,
		},
		{
			&TimeConstraint{
				After: types.Time3339(time.Unix(123, 0)),
			},
			time.Unix(123, 0),
			true,
		},
		{
			&TimeConstraint{
				After: types.Time3339(time.Unix(123, 0)),
			},
			time.Unix(123, 1),
			true,
		},
		{
			&TimeConstraint{
				After: types.Time3339(time.Unix(123, 0)),
			},
			time.Unix(122, 0),
			false,
		},
		{
			// This test will pass for 20 years at least.
			&TimeConstraint{
				InLast: 20 * year,
			},
			time.Unix(1384034605, 0),
			true,
		},
		{
			&TimeConstraint{
				InLast: 1 * year,
			},
			time.Unix(123, 0),
			false,
		},
	}
	for i, tt := range tests {
		got := tt.c.timeMatches(tt.t)
		if got != tt.want {
			t.Errorf("%d. matches(tc=%+v, t=%v) = %v; want %v", i, tt.c, tt.t, got, tt.want)
		}
	}
}
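// The table above pins down the semantics a timeMatches implementation must
// satisfy: After is inclusive, Before is exclusive, and InLast is measured
// back from the current time. A sketch consistent with those tests follows;
// it assumes Time3339's underlying type is time.Time and is not necessarily
// the real method body:
func (c *TimeConstraint) timeMatchesSketch(t time.Time) bool {
	if after := time.Time(c.After); !after.IsZero() && t.Before(after) {
		return false // earlier than the inclusive lower bound
	}
	if before := time.Time(c.Before); !before.IsZero() && !t.Before(before) {
		return false // at or past the exclusive upper bound
	}
	if c.InLast > 0 && t.Before(time.Now().Add(-c.InLast)) {
		return false // outside the trailing window
	}
	return true
}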
var keywordTests = []keywordTestcase{
	// Core predicates
	{
		object:      newAfter(),
		args:        []string{"faulty"},
		errContains: "faulty",
	},

	{
		object: newAfter(),
		args:   []string{"2013-02-03"},
		want: &Constraint{
			Permanode: &PermanodeConstraint{
				Time: &TimeConstraint{
					After: types.Time3339(testtime),
				},
			},
		},
	},

	{
		object:      newBefore(),
		args:        []string{"faulty"},
		errContains: "faulty",
	},

	{
		object: newBefore(),
		args:   []string{"2013-02-03"},
		want: &Constraint{
			Permanode: &PermanodeConstraint{
				Time: &TimeConstraint{
					Before: types.Time3339(testtime),
				},
			},
		},
	},
}
// b: the parsed file schema blob
// mm: keys to populate
func (ix *Index) populateFile(fetcher blob.Fetcher, b *schema.Blob, mm *mutationMap) (err error) {
	var times []time.Time // all creation or mod times seen; may be zero
	times = append(times, b.ModTime())

	blobRef := b.BlobRef()
	fr, err := b.NewFileReader(fetcher)
	if err != nil {
		return err
	}
	defer fr.Close()
	mimeType, mr := magic.MIMETypeFromReader(fr)
	if mimeType == "" {
		mimeType = magic.MIMETypeByExtension(filepath.Ext(b.FileName()))
	}

	sha1 := sha1.New()
	var copyDest io.Writer = sha1
	var imageBuf *keepFirstN // or nil
	if strings.HasPrefix(mimeType, "image/") {
		imageBuf = &keepFirstN{N: 512 << 10}
		copyDest = io.MultiWriter(copyDest, imageBuf)
	}
	size, err := io.Copy(copyDest, mr)
	if err != nil {
		return err
	}
	wholeRef := blob.RefFromHash(sha1)

	if imageBuf != nil {
		var conf images.Config
		decodeConfig := func(r filePrefixReader) error {
			conf, err = images.DecodeConfig(r)
			return err
		}
		if err := readPrefixOrFile(imageBuf.Bytes, fetcher, b, decodeConfig); err == nil {
			mm.Set(keyImageSize.Key(blobRef), keyImageSize.Val(fmt.Sprint(conf.Width), fmt.Sprint(conf.Height)))
		}

		var ft time.Time
		fileTime := func(r filePrefixReader) error {
			ft, err = schema.FileTime(r)
			return err
		}
		if err = readPrefixOrFile(imageBuf.Bytes, fetcher, b, fileTime); err == nil {
			times = append(times, ft)
		}
		if exifDebug {
			log.Printf("filename %q exif = %v, %v", b.FileName(), ft, err)
		}

		// TODO(mpl): find (generate?) more broken EXIF images to experiment with.
		indexEXIFData := func(r filePrefixReader) error {
			return indexEXIF(wholeRef, r, mm)
		}
		if err = readPrefixOrFile(imageBuf.Bytes, fetcher, b, indexEXIFData); err != nil {
			if exifDebug {
				log.Printf("error parsing EXIF: %v", err)
			}
		}
	}

	var sortTimes []time.Time
	for _, t := range times {
		if !t.IsZero() {
			sortTimes = append(sortTimes, t)
		}
	}
	sort.Sort(types.ByTime(sortTimes))
	var time3339s string
	switch {
	case len(sortTimes) == 1:
		time3339s = types.Time3339(sortTimes[0]).String()
	case len(sortTimes) >= 2:
		oldest, newest := sortTimes[0], sortTimes[len(sortTimes)-1]
		time3339s = types.Time3339(oldest).String() + "," + types.Time3339(newest).String()
	}

	mm.Set(keyWholeToFileRef.Key(wholeRef, blobRef), "1")
	mm.Set(keyFileInfo.Key(blobRef), keyFileInfo.Val(size, b.FileName(), mimeType, wholeRef))
	mm.Set(keyFileTimes.Key(blobRef), keyFileTimes.Val(time3339s))

	if strings.HasPrefix(mimeType, "audio/") {
		indexMusic(io.NewSectionReader(fr, 0, fr.Size()), wholeRef, mm)
	}

	return nil
}
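// The keyFileTimes value written above is therefore empty (no usable times),
// a single RFC 3339 string, or "oldest,newest" when a file carries more than
// one timestamp (e.g. schema mod time plus EXIF time). A hypothetical helper
// (not in the source) showing how a reader of that row could decode it,
// assuming Time3339.String emits RFC 3339 with nanoseconds:
func parseFileTimes(v string) ([]time.Time, error) {
	if v == "" {
		return nil, nil
	}
	var times []time.Time
	for _, s := range strings.Split(v, ",") {
		t, err := time.Parse(time.RFC3339Nano, s)
		if err != nil {
			return nil, err
		}
		times = append(times, t)
	}
	return times, nil
}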