func TestHubFiring(t *testing.T) {
	hub := &SimpleBlobHub{}
	ch := make(chan blob.Ref)
	bch := make(chan blob.Ref)
	blob1 := blob.MustParse("sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33")
	blobsame := blob.MustParse("sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33")

	hub.NotifyBlobReceived(blob1) // no-op
	hub.RegisterListener(ch)
	hub.RegisterBlobListener(blob1, bch)
	hub.NotifyBlobReceived(blobsame)

	tmr1 := time.NewTimer(1e9)
	select {
	case <-tmr1.C:
		t.Fatal("timer expired on receiving from ch")
	case got := <-ch:
		if got != blob1 {
			t.Fatalf("got wrong blob")
		}
	}

	select {
	case <-tmr1.C:
		t.Fatal("timer expired on receiving from bch")
	case got := <-bch:
		if got != blob1 {
			t.Fatalf("got wrong blob")
		}
	}
	tmr1.Stop()
}
func TestShareExpiration(t *testing.T) {
	defer func() { clockNow = time.Now }()
	b, err := BlobFromReader(
		blob.MustParse("sha1-64ffa72fa9bcb2f825e7ed40b9451e5cadca4c2c"),
		strings.NewReader(`{"camliVersion": 1, "authType": "haveref", "camliSigner": "sha1-f2b0b7da718b97ce8c31591d8ed4645c777f3ef4", "camliType": "claim", "claimDate": "2013-09-08T23:58:53.656549677Z", "claimType": "share", "expires": "2013-09-09T23:58:53.65658012Z", "target": "sha1-f1d2d2f924e986ac86fdf7b36c94bcdf32beec15", "transitive": false ,"camliSig":"wsBcBAABCAAQBQJSLQ89CRApMaZ8JvWr2gAAcuEIABRQolhn+yKksfaBx6oLo18NWvWQ+aYweF+5Gu0TH0Ixur7t1o5HFtFSSfFISyggSZDJSjsxoxaawhWrvCe9dZuU2s/zgRpgUtd2xmBt82tLOn9JidnUavsNGFXbfCwdUBSkzN0vDYLmgXW0VtiybB354uIKfOInZor2j8Mq0p6pkWzK3qq9W0dku7iE96YFaTb4W7eOikqoSC6VpjC1/4MQWOYRHLcPcIEY6xJ8es2sYMMSNXuVaR9nMupz8ZcTygP4jh+lPR1OH61q/FSjpRp7GKt4wZ1PknYjMbnpIzVjiSz0MkYd65bpZwuPOwZh/h2kHW7wvHNQZfWUJHEsOAI==J2ID"}`),
	)
	if err != nil {
		t.Fatal(err)
	}
	s, ok := b.AsShare()
	if !ok {
		t.Fatal("expected share")
	}
	clockNow = func() time.Time { return time.Unix(100, 0) }
	if s.IsExpired() {
		t.Error("expected not expired")
	}
	clockNow = func() time.Time { return time.Unix(1378687181+2*86400, 0) }
	if !s.IsExpired() {
		t.Error("expected expired")
	}

	// And without an expiration time:
	b, err = BlobFromReader(
		blob.MustParse("sha1-931875ec6b8d917b7aae9f672f4f92de1ffaeeb1"),
		strings.NewReader(`{"camliVersion": 1, "authType": "haveref", "camliSigner": "sha1-f2b0b7da718b97ce8c31591d8ed4645c777f3ef4", "camliType": "claim", "claimDate": "2013-09-09T01:01:09.907842963Z", "claimType": "share", "target": "sha1-64ffa72fa9bcb2f825e7ed40b9451e5cadca4c2c", "transitive": false ,"camliSig":"wsBcBAABCAAQBQJSLR3VCRApMaZ8JvWr2gAA14kIAKmi5rCI5JTBvHbBuAu7wPVA87BLXm/BaD6zjqOENB4U8B+6KxyuT6KXe9P591IDXdZmJTP5tesbLtKw0iAWiRf2ea0Y7Ms3K77nLnSZM5QIOzb4aQKd1668p/5KqU3VfNayoHt69YkXyKBkqyEPjHINzC03QuLz5NIEBMYJaNqKKtEtSgh4gG8BBYq5qQzdKFg/Hx7VhkhW1y/1wwGSFJjaiPFMIJsF4d/gaO01Ip7XLro63ccyCy81tqKHnVjv0uULmZdbpgd3RHGGSnW3c9BfqkGvc3Wl11UQKzqc9OT+WTAWp8TXg6bLES9sQNzerx2wUfjKB9J4Yrk14iBfjl8==AynO"}`),
	)
	if err != nil {
		t.Fatal(err)
	}
	s, ok = b.AsShare()
	if !ok {
		t.Fatal("expected share")
	}
	clockNow = func() time.Time { return time.Unix(100, 0) }
	if s.IsExpired() {
		t.Error("expected not expired")
	}
	clockNow = func() time.Time { return time.Unix(1378687181+2*86400, 0) }
	if s.IsExpired() {
		t.Error("expected not expired")
	}
}
func TestKVClaim(t *testing.T) {
	tests := []struct {
		k, v string
		ok   bool
		want camtypes.Claim
	}{
		{
			k:  "claim|sha1-b380b3080f9c71faa5c1d82bbd4d583a473bc77d|2931A67C26F5ABDA|2011-11-28T01:32:37.000123456Z|sha1-b3d93daee62e40d36237ff444022f42d7d0e43f2",
			v:  "set-attribute|tag|foo1|sha1-ad87ca5c78bd0ce1195c46f7c98e6025abbaf007",
			ok: true,
			want: camtypes.Claim{
				BlobRef:   blob.MustParse("sha1-b3d93daee62e40d36237ff444022f42d7d0e43f2"),
				Signer:    blob.MustParse("sha1-ad87ca5c78bd0ce1195c46f7c98e6025abbaf007"),
				Permanode: blob.MustParse("sha1-b380b3080f9c71faa5c1d82bbd4d583a473bc77d"),
				Type:      "set-attribute",
				Attr:      "tag",
				Value:     "foo1",
				Date:      time.Time(types.ParseTime3339OrZero("2011-11-28T01:32:37.000123456Z")),
			},
		},
	}
	for _, tt := range tests {
		got, ok := index.ExpKvClaim(tt.k, tt.v, blob.Parse)
		if ok != tt.ok {
			t.Errorf("kvClaim(%q, %q) = ok %v; want %v", tt.k, tt.v, ok, tt.ok)
			continue
		}
		if got != tt.want {
			t.Errorf("kvClaim(%q, %q) = %+v; want %+v", tt.k, tt.v, got, tt.want)
			continue
		}
	}
}
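// For readability: the key/value row parsed by kvClaim above appears to follow
// this layout (inferred from the test data; an assumption, not a documented spec):
//
//	key:   claim|<permanode ref>|<signer keyID>|<claim date, RFC 3339>|<claim blobref>
//	value: <claim type>|<attr>|<value>|<signer blobref>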
// handlerDescribeTestSetupWithImage extends handlerDescribeTestSetup by adding
// a camliContentImage claim to pn.
func handlerDescribeTestSetupWithImage(fi *test.FakeIndex) index.Interface {
	handlerDescribeTestSetup(fi)
	pn := blob.MustParse("perma-123")
	imageRef := blob.MustParse("fakeref-789")
	fi.AddMeta(imageRef, "", 789)
	fi.AddClaim(owner, pn, "set-attribute", "camliContentImage", imageRef.String())
	return fi
}
func TestDescribeMarshal(t *testing.T) {
	// Empty Describe
	q := &SearchQuery{
		Describe: &DescribeRequest{},
	}
	enc, err := json.Marshal(q)
	if err != nil {
		t.Fatal(err)
	}
	if got, want := string(enc), `{"describe":{"blobref":null,"at":null}}`; got != want {
		t.Errorf("JSON: %s; want %s", got, want)
	}
	back := &SearchQuery{}
	err = json.Unmarshal(enc, back)
	if err != nil {
		t.Fatal(err)
	}
	if !reflect.DeepEqual(q, back) {
		t.Errorf("Didn't round-trip. Got %#v; want %#v", back, q)
	}

	// DescribeRequest with multiple blobrefs
	q = &SearchQuery{
		Describe: &DescribeRequest{
			BlobRefs: []blob.Ref{blob.MustParse("sha-1234"), blob.MustParse("sha-abcd")},
		},
	}
	enc, err = json.Marshal(q)
	if err != nil {
		t.Fatal(err)
	}
	if got, want := string(enc), `{"describe":{"blobrefs":["sha-1234","sha-abcd"],"blobref":null,"at":null}}`; got != want {
		t.Errorf("JSON: %s; want %s", got, want)
	}
	back = &SearchQuery{}
	err = json.Unmarshal(enc, back)
	if err != nil {
		t.Fatal(err)
	}
	if !reflect.DeepEqual(q, back) {
		t.Errorf("Didn't round-trip. Got %#v; want %#v", back, q)
	}

	// And the zero value.
	q = &SearchQuery{}
	enc, err = json.Marshal(q)
	if err != nil {
		t.Fatal(err)
	}
	if string(enc) != "{}" {
		t.Errorf("Zero value: %q; want {}", enc)
	}
}
func (fi *FakeIndex) AddClaim(owner, permanode blob.Ref, claimType, attr, value string) {
	fi.lk.Lock()
	defer fi.lk.Unlock()
	date := fi.nextDate()

	claim := &search.Claim{
		Permanode: permanode,
		Signer:    blob.Ref{},
		BlobRef:   blob.Ref{},
		Date:      date,
		Type:      claimType,
		Attr:      attr,
		Value:     value,
	}
	key := permanode.String() + "/" + owner.String()
	fi.ownerClaims[key] = append(fi.ownerClaims[key], claim)

	if claimType == "set-attribute" && strings.HasPrefix(attr, "camliPath:") {
		suffix := attr[len("camliPath:"):]
		path := &search.Path{
			Target: blob.MustParse(value),
			Suffix: suffix,
		}
		fi.path[fmt.Sprintf("%s\x00%s\x00%s", owner, permanode, suffix)] = path
	}
}
func missingBlobs(dbDir, blobDir string) error {
	fsck, err := db.NewRO(dbDir)
	if err != nil {
		return err
	}
	defer fsck.Close()
	bs, err := dir.New(blobDir)
	if err != nil {
		return err
	}
	// TODO: should bs be closed here as well? (no Close method is apparent)
	missing := 0
	// TODO(dichro): cache Parents() call results?
	for ref := range fsck.Missing() {
		if body, size, err := bs.Fetch(blob.MustParse(ref)); err == nil {
			log.Printf("missing ref %q found with size %d", ref, size)
			body.Close()
			continue
		}
		fmt.Println(ref)
		missing++
		nodes, err := fsck.Parents(ref)
		if err != nil {
			log.Print(err)
			continue
		}
		printHierarchy(fsck, bs, 1, "", nodes)
	}
	fmt.Println("total", missing)
	return nil
}
func TestShareSearchSerialization(t *testing.T) {
	signer := blob.MustParse("yyy-5678")

	q := &search.SearchQuery{
		Expression: "is:image",
		Limit:      42,
	}

	bb := schema.NewShareRef(schema.ShareHaveRef, true)
	bb.SetShareSearch(q)
	bb = bb.SetSigner(signer)
	bb = bb.SetClaimDate(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC))
	s := bb.Blob().JSON()

	want := `{"camliVersion": 1,
  "authType": "haveref",
  "camliSigner": "yyy-5678",
  "camliType": "claim",
  "claimDate": "2009-11-10T23:00:00Z",
  "claimType": "share",
  "search": {
    "expression": "is:image",
    "limit": 42,
    "around": null
  },
  "transitive": true
}`
	if want != s {
		t.Errorf("Incorrect serialization of shared search. Wanted:\n %s\nGot:\n%s\n", want, s)
	}
}
func TestCorpusAppendPermanodeAttrValues(t *testing.T) {
	c, pn, sig1, sig2 := newTestCorpusWithPermanode()
	s := func(s ...string) []string { return s }
	sigMissing := blob.MustParse("xyz-123")

	tests := []struct {
		attr string
		want []string
		t    time.Time
		sig  blob.Ref
	}{
		{attr: "not-exist", want: s()},
		{attr: "DelAll", want: s()},
		{attr: "DelOne", want: s("b", "c", "e")},
		{attr: "foo", want: s("foov")},
		{attr: "tag", want: s("c", "e", "f")},
		{attr: "tag", want: s("a", "b"), t: time.Unix(102, 0)},
		{attr: "SetAfterAdd", want: s("setv")},
		// sig1
		{attr: "not-exist", want: s(), sig: sig1},
		{attr: "DelAll", want: s(), sig: sig1},
		{attr: "DelOne", want: s("b", "c"), sig: sig1},
		{attr: "foo", want: s("foov"), sig: sig1},
		{attr: "tag", want: s("c", "e"), sig: sig1},
		{attr: "tag", want: s("a", "b"), t: time.Unix(102, 0), sig: sig1},
		{attr: "SetAfterAdd", want: s("setv"), sig: sig1},
		// sig2
		{attr: "DelAll", want: s("c"), sig: sig2},
		{attr: "DelOne", want: s("e"), sig: sig2},
		{attr: "tag", want: s("d"), t: time.Unix(105, 0), sig: sig2},
		{attr: "SetAfterAdd", want: s("c"), sig: sig2},
		// sigMissing (not present in pn)
		{attr: "tag", want: s(), sig: sigMissing},
	}
	for i, tt := range tests {
		got := c.AppendPermanodeAttrValues(nil, pn, tt.attr, tt.t, tt.sig)
		if len(got) == 0 && len(tt.want) == 0 {
			continue
		}
		if !reflect.DeepEqual(got, tt.want) {
			t.Errorf("%d. attr %q = %q; want %q", i, tt.attr, got, tt.want)
		}
		if !tt.t.IsZero() {
			// skip equivalence test if specific time was given
			continue
		}
		got = c.AppendPermanodeAttrValues(nil, pn, tt.attr, time.Unix(200, 0), tt.sig)
		if len(got) == 0 && len(tt.want) == 0 {
			continue
		}
		if !reflect.DeepEqual(got, tt.want) {
			t.Errorf("%d. attr %q = %q; want %q", i, tt.attr, got, tt.want)
		}
	}
}
func okayWithoutMeta(refStr string) func(*packTest) {
	return func(pt *packTest) {
		if pt.okayNoMeta == nil {
			pt.okayNoMeta = map[blob.Ref]bool{}
		}
		pt.okayNoMeta[blob.MustParse(refStr)] = true
	}
}
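// Hedged usage sketch for the functional option above (testPack and its other
// arguments are hypothetical names for illustration, not necessarily this
// package's actual API):
//
//	testPack(t, someInputBlobs,
//		okayWithoutMeta("sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"),
//	)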
func setupContent(owner blob.Ref, rootName string) *test.FakeIndex {
	picNode := blob.MustParse("picpn-1234")
	galRef := blob.MustParse("gal-1234")
	rootRef := blob.MustParse("root-abcd")
	camp0 := blob.MustParse("picpn-9876543210")
	camp1 := blob.MustParse("picpn-9876543211")
	camp0f := blob.MustParse("picfile-f00ff00f00a5")
	camp1f := blob.MustParse("picfile-f00ff00f00b6")

	idx := test.NewFakeIndex()
	idx.AddSignerAttrValue(owner, "camliRoot", rootName, rootRef)

	idx.AddMeta(owner, "", 100)
	for _, br := range []blob.Ref{picNode, galRef, rootRef, camp0, camp1} {
		idx.AddMeta(br, "permanode", 100)
	}
	for _, br := range []blob.Ref{camp0f, camp1f} {
		idx.AddMeta(br, "file", 100)
	}

	idx.AddClaim(owner, rootRef, "set-attribute", "camliPath:singlepic", picNode.String())
	idx.AddClaim(owner, rootRef, "set-attribute", "camliPath:camping", galRef.String())
	idx.AddClaim(owner, galRef, "add-attribute", "camliMember", camp0.String())
	idx.AddClaim(owner, galRef, "add-attribute", "camliMember", camp1.String())
	idx.AddClaim(owner, camp0, "set-attribute", "camliContent", camp0f.String())
	idx.AddClaim(owner, camp1, "set-attribute", "camliContent", camp1f.String())

	return idx
}
func testMergeFileInfoRow(t *testing.T, wholeRef string) {
	c := index.ExpNewCorpus()
	value := "100|something%2egif|image%2Fgif"
	want := camtypes.FileInfo{
		Size:     100,
		MIMEType: "image/gif",
		FileName: "something.gif",
	}
	if wholeRef != "" {
		value += "|" + wholeRef
		want.WholeRef = blob.MustParse(wholeRef)
	}
	c.Exp_mergeFileInfoRow("fileinfo|sha1-579f7f246bd420d486ddeb0dadbb256cfaf8bf6b", value)
	fi := c.Exp_files(blob.MustParse("sha1-579f7f246bd420d486ddeb0dadbb256cfaf8bf6b"))
	if !reflect.DeepEqual(want, fi) {
		t.Errorf("Got %+v; want %+v", fi, want)
	}
}
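// For readability: the fileinfo row value merged above appears to follow this
// layout (inferred from the test inputs; an assumption, not a documented spec):
//
//	<size>|<escaped file name>|<escaped MIME type>[|<wholeRef>]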
func handlerDescribeTestSetup(fi *test.FakeIndex) index.Interface {
	pn := blob.MustParse("perma-123")
	fi.AddMeta(pn, "permanode", 123)
	fi.AddClaim(owner, pn, "set-attribute", "camliContent", "fakeref-232")
	fi.AddMeta(blob.MustParse("fakeref-232"), "", 878)

	// Test deleting all attributes.
	fi.AddClaim(owner, pn, "add-attribute", "wont-be-present", "x")
	fi.AddClaim(owner, pn, "add-attribute", "wont-be-present", "y")
	fi.AddClaim(owner, pn, "del-attribute", "wont-be-present", "")

	// Test deleting a specific attribute.
	fi.AddClaim(owner, pn, "add-attribute", "only-delete-b", "a")
	fi.AddClaim(owner, pn, "add-attribute", "only-delete-b", "b")
	fi.AddClaim(owner, pn, "add-attribute", "only-delete-b", "c")
	fi.AddClaim(owner, pn, "del-attribute", "only-delete-b", "b")
	return fi
}
func TestPaths(t *testing.T) { br := blob.MustParse("digalg-abcd") ds := &DiskStorage{root: "/tmp/dir"} slash := filepath.ToSlash if e, g := "/tmp/dir/digalg/abc/d__", slash(ds.blobDirectory("", br)); e != g { t.Errorf("short blobref dir; expected path %q; got %q", e, g) } if e, g := "/tmp/dir/digalg/abc/d__/digalg-abcd.dat", slash(ds.blobPath("", br)); e != g { t.Errorf("short blobref path; expected path %q; got %q", e, g) } br = blob.MustParse("sha1-c22b5f9178342609428d6f51b2c5af4c0bde6a42") if e, g := "/tmp/dir/partition/foo/sha1/c22/b5f", slash(ds.blobDirectory("foo", br)); e != g { t.Errorf("amazon queue dir; expected path %q; got %q", e, g) } }
func sendTestBlobs(ch chan blob.SizedRef, list string, size uint32) {
	defer close(ch)
	if list == "" {
		return
	}
	for _, br := range strings.Split(list, ",") {
		ch <- blob.SizedRef{Ref: blob.MustParse(br), Size: size}
	}
}
func addPermanode(fi *test.FakeIndex, pnStr string, attrs ...string) {
	pn := blob.MustParse(pnStr)
	fi.AddMeta(pn, "permanode", 123)
	for len(attrs) > 0 {
		k, v := attrs[0], attrs[1]
		attrs = attrs[2:]
		fi.AddClaim(owner, pn, "add-attribute", k, v)
	}
}
func TestBlobFromURLPath(t *testing.T) {
	br := blobFromURLPath("/foo/bar/camli/sha1-f1d2d2f924e986ac86fdf7b36c94bcdf32beec15")
	if !br.Valid() {
		t.Fatal("nothing found")
	}
	want := blob.MustParse("sha1-f1d2d2f924e986ac86fdf7b36c94bcdf32beec15")
	if want != br {
		t.Fatalf("got = %v; want %v", br, want)
	}
}
func sendTestBlobs(ch chan blob.SizedRef, list string) {
	defer close(ch)
	if list == "" {
		return
	}
	for _, b := range strings.Split(list, ",") {
		br := blob.MustParse(b)
		ch <- blob.SizedRef{Ref: br, Size: 123}
	}
}
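// Hedged usage sketch for the helper above: the sender runs in its own
// goroutine so the unbuffered channel can be drained by a consumer (the
// consumer shown is illustrative, not the package's actual caller):
//
//	ch := make(chan blob.SizedRef)
//	go sendTestBlobs(ch, "foo-123,foo-456")
//	for sb := range ch {
//		// compare sb against the other side's enumeration...
//	}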
func TestDescribeLocation(t *testing.T) {
	tests := []struct {
		ref       string
		lat, long float64
		hasNoLoc  bool
	}{
		{ref: "filewithloc-0", lat: 45, long: 56},
		{ref: "location-0", lat: 45, long: 56},
		{ref: "locationpriority-1", lat: 67, long: 78},
		{ref: "locationpriority-2", lat: 12, long: 34},
		{ref: "locationoverride-1", lat: 67, long: 78},
		{ref: "locationoverride-2", lat: 67, long: 78},
		{ref: "homedir-0", hasNoLoc: true},
	}

	ix := searchDescribeSetup(test.NewFakeIndex())
	ctx := context.Background()
	h := search.NewHandler(ix, owner)

	ix.RLock()
	defer ix.RUnlock()

	for _, tt := range tests {
		var err error
		br := blob.MustParse(tt.ref)
		res, err := h.Describe(ctx, &search.DescribeRequest{
			BlobRef: br,
			Depth:   1,
		})
		if err != nil {
			t.Errorf("Describe for %v failed: %v", br, err)
			continue
		}
		db := res.Meta[br.String()]
		if db == nil {
			t.Errorf("Describe result for %v is missing", br)
			continue
		}
		loc := db.Location
		if tt.hasNoLoc {
			if loc != nil {
				t.Errorf("got location for %v, should have no location", br)
			}
		} else {
			if loc == nil {
				t.Errorf("no location in result for %v", br)
				continue
			}
			if loc.Latitude != tt.lat || loc.Longitude != tt.long {
				t.Errorf("location for %v invalid, got %f,%f want %f,%f",
					tt.ref, loc.Latitude, loc.Longitude, tt.lat, tt.long)
			}
		}
	}
}
func newTestCorpusWithPermanode() (*index.Corpus, blob.Ref) {
	c := index.ExpNewCorpus()
	pn := blob.MustParse("abc-123")
	tm := time.Unix(99, 0)
	claim := func(verb, attr, val string) *camtypes.Claim {
		tm = tm.Add(time.Second)
		return &camtypes.Claim{
			Type:  verb + "-attribute",
			Attr:  attr,
			Value: val,
			Date:  tm,
		}
	}
	c.SetClaims(pn, []*camtypes.Claim{
		claim("set", "foo", "foov"), // time 100
		claim("add", "tag", "a"),    // time 101
		claim("add", "tag", "b"),    // time 102
		claim("del", "tag", ""),
		claim("add", "tag", "c"),
		claim("add", "tag", "d"),
		claim("add", "tag", "e"),
		claim("del", "tag", "d"),

		claim("add", "DelAll", "a"),
		claim("add", "DelAll", "b"),
		claim("add", "DelAll", "c"),
		claim("del", "DelAll", ""),

		claim("add", "DelOne", "a"),
		claim("add", "DelOne", "b"),
		claim("add", "DelOne", "c"),
		claim("add", "DelOne", "d"),
		claim("del", "DelOne", "d"),
		claim("del", "DelOne", "a"),

		claim("add", "SetAfterAdd", "a"),
		claim("add", "SetAfterAdd", "b"),
		claim("set", "SetAfterAdd", "setv"),

		// add an element with fixed time to test
		// slow and fast path equivalence
		// (lookups based on pm.Claims and pm.Attrs, respectively)
		{
			Type:  "set-attribute",
			Attr:  "CacheTest",
			Value: "foo",
			Date:  time.Unix(201, 0),
		},
	})
	return c, pn
}
func TestServeBlobRef_Range(t *testing.T) {
	req, _ := http.NewRequest("GET", "/path/isn't/used", nil)
	req.Header.Set("Range", "bytes=0-2")
	br := blob.MustParse("foo-000")

	rr := httptest.NewRecorder()
	rr.Body = new(bytes.Buffer)
	ServeBlobRef(rr, req, br, fetcher{strings.NewReader("foobar"), 6})
	if rr.Body.String() != "foo" {
		t.Errorf("Got %q; want foo", rr.Body)
	}
}
func Files(t *testing.T, initIdx func() *index.Index) {
	ctx := context.Background()
	id := NewIndexDeps(initIdx())
	id.Fataler = t
	fileTime := time.Unix(1361250375, 0)
	fileRef, wholeRef := id.UploadFile("foo.html", "<html>I am an html file.</html>", fileTime)
	t.Logf("uploaded fileref %q, wholeRef %q", fileRef, wholeRef)
	id.DumpIndex(t)

	// ExistingFileSchemas
	{
		key := fmt.Sprintf("wholetofile|%s|%s", wholeRef, fileRef)
		if g, e := id.Get(key), "1"; g != e {
			t.Fatalf("%q = %q, want %q", key, g, e)
		}

		refs, err := id.Index.ExistingFileSchemas(wholeRef)
		if err != nil {
			t.Fatalf("ExistingFileSchemas = %v", err)
		}
		want := []blob.Ref{fileRef}
		if !reflect.DeepEqual(refs, want) {
			t.Errorf("ExistingFileSchemas got = %#v, want %#v", refs, want)
		}
	}

	// FileInfo
	{
		key := fmt.Sprintf("fileinfo|%s", fileRef)
		if g, e := id.Get(key), "31|foo.html|text%2Fhtml|sha1-153cb1b63a8f120a0e3e14ff34c64f169df9430f"; g != e {
			t.Fatalf("%q = %q, want %q", key, g, e)
		}

		fi, err := id.Index.GetFileInfo(ctx, fileRef)
		if err != nil {
			t.Fatalf("GetFileInfo = %v", err)
		}
		if got, want := fi.Size, int64(31); got != want {
			t.Errorf("Size = %d, want %d", got, want)
		}
		if got, want := fi.FileName, "foo.html"; got != want {
			t.Errorf("FileName = %q, want %q", got, want)
		}
		if got, want := fi.MIMEType, "text/html"; got != want {
			t.Errorf("MIMEType = %q, want %q", got, want)
		}
		if got, want := fi.Time, fileTime; !got.Time().Equal(want) {
			t.Errorf("Time = %v; want %v", got, want)
		}
		if got, want := fi.WholeRef, blob.MustParse("sha1-153cb1b63a8f120a0e3e14ff34c64f169df9430f"); got != want {
			t.Errorf("WholeRef = %v; want %v", got, want)
		}
	}
}
func TestHubRegistration(t *testing.T) {
	hub := &SimpleBlobHub{}
	ch := make(chan blob.Ref)
	ch2 := make(chan blob.Ref)
	b1 := blob.MustParse("sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33")
	b2 := blob.MustParse("sha1-62cdb7020ff920e5aa642c3d4066950dd1f01f4d")

	Expect(t, hub.listeners == nil, "hub.listeners is nil before RegisterListener")

	hub.RegisterListener(ch)
	ExpectInt(t, 1, len(hub.listeners), "len(hub.listeners) after RegisterListener")

	hub.RegisterListener(ch2)
	ExpectInt(t, 2, len(hub.listeners), "len(hub.listeners) after ch2 RegisterListener")

	hub.UnregisterListener(ch)
	ExpectInt(t, 1, len(hub.listeners), "len(hub.listeners) after UnregisterListener")

	hub.UnregisterListener(ch2)
	ExpectInt(t, 0, len(hub.listeners), "len(hub.listeners) after UnregisterListener")

	Expect(t, hub.blobListeners == nil, "hub.blobListeners is nil before RegisterBlobListener")

	hub.RegisterBlobListener(b1, ch)
	Expect(t, hub.blobListeners != nil, "hub.blobListeners is not nil after RegisterBlobListener")
	Expect(t, hub.blobListeners[b1.String()] != nil, "b1 in hub.blobListeners map")
	ExpectInt(t, 1, len(hub.blobListeners[b1.String()]), "hub.blobListeners[b1] size")
	ExpectInt(t, 1, len(hub.blobListeners), "hub.blobListeners size")

	hub.RegisterBlobListener(b2, ch)
	ExpectInt(t, 1, len(hub.blobListeners[b2.String()]), "hub.blobListeners[b2] size")
	ExpectInt(t, 2, len(hub.blobListeners), "hub.blobListeners size")

	hub.UnregisterBlobListener(b2, ch)
	Expect(t, hub.blobListeners[b2.String()] == nil, "b2 not in hub.blobListeners")
	ExpectInt(t, 1, len(hub.blobListeners), "hub.blobListeners size")

	hub.UnregisterBlobListener(b1, ch)
	Expect(t, hub.blobListeners[b1.String()] == nil, "b1 not in hub.blobListeners")
	ExpectInt(t, 0, len(hub.blobListeners), "hub.blobListeners size")
}
func TestPaths(t *testing.T) { br := blob.MustParse("digalg-abc") ds := &DiskStorage{root: "/tmp/dir"} slash := filepath.ToSlash if e, g := "/tmp/dir/digalg/ab/c_", slash(ds.blobDirectory(br)); e != g { t.Errorf("short blobref dir; expected path %q; got %q", e, g) } if e, g := "/tmp/dir/digalg/ab/c_/digalg-abc.dat", slash(ds.blobPath(br)); e != g { t.Errorf("short blobref path; expected path %q; got %q", e, g) } }
func TestAsClaimAndAsShare(t *testing.T) {
	br := blob.MustParse("xxx-1234")
	signer := blob.MustParse("yyy-5678")

	bb := NewSetAttributeClaim(br, "title", "Test Title")
	bb = bb.SetSigner(signer)
	bb = bb.SetClaimDate(time.Now())
	c1 := bb.Blob()
	c1.ss.Sig = "non-null-sig" // required by AsShare

	bb = NewShareRef(ShareHaveRef, br, true)
	bb = bb.SetSigner(signer)
	bb = bb.SetClaimDate(time.Now())
	c2 := bb.Blob()
	c2.ss.Sig = "non-null-sig" // required by AsShare

	if !br.Valid() {
		t.Error("Blobref not valid")
	}

	_, ok := c1.AsClaim()
	if !ok {
		t.Error("Claim 1 not returned as claim")
	}

	_, ok = c2.AsClaim()
	if !ok {
		t.Error("Claim 2 not returned as claim")
	}

	s, ok := c1.AsShare()
	if ok {
		t.Error("Title claim returned share", s)
	}

	s, ok = c2.AsShare()
	if !ok {
		t.Error("Share claim failed to return share")
	}
}
func testServeBlobRef(w io.Writer, fetcher blob.Fetcher) *httptest.ResponseRecorder {
	req, _ := http.NewRequest("GET", "/path/isn't/used", nil)
	br := blob.MustParse("foo-123")

	rr := httptest.NewRecorder()
	rr.Body = new(bytes.Buffer)
	var rw http.ResponseWriter = rr
	if w != nil {
		rw = &altWriterRecorder{io.MultiWriter(w, rr.Body), rr}
	}
	ServeBlobRef(rw, req, br, fetcher)
	return rr
}
func TestMergeFileInfoRow(t *testing.T) {
	c := index.ExpNewCorpus()
	c.Exp_mergeFileInfoRow("fileinfo|sha1-579f7f246bd420d486ddeb0dadbb256cfaf8bf6b",
		"100|something%2egif|image%2Fgif")
	fi := c.Exp_files(blob.MustParse("sha1-579f7f246bd420d486ddeb0dadbb256cfaf8bf6b"))
	want := camtypes.FileInfo{
		Size:     100,
		MIMEType: "image/gif",
		FileName: "something.gif",
	}
	if !reflect.DeepEqual(want, fi) {
		t.Errorf("Got %+v; want %+v", fi, want)
	}
}
func TestHandlerWrongRef(t *testing.T) {
	storage := new(test.Fetcher)
	ref := blob.MustParse("sha1-f1d2d2f924e986ac86fdf7b36c94bcdf32beec15")
	wrongRefString := "sha1-e242ed3bffccdf271b7fbaf34ed72d089537b42f"
	ts := httptest.NewServer(createVideothumbnailHandler(ref, storage))
	defer ts.Close()

	resp, err := http.Get(ts.URL + "/" + wrongRefString)
	if err != nil {
		t.Fatal(err)
	}
	if resp.StatusCode != 403 {
		t.Fatalf("expected forbidden status when the wrong ref is requested")
	}
}
// wholeFileDigest returns the sha1 digest of the contents of the regular
// file whose absolute path is given in fullPath.
func (up *Uploader) wholeFileDigest(fullPath string) (blob.Ref, error) {
	// TODO(bradfitz): cache this.
	file, err := up.open(fullPath)
	if err != nil {
		return blob.Ref{}, err
	}
	defer file.Close()
	td := &trackDigestReader{r: file}
	_, err = io.Copy(ioutil.Discard, td)
	atomic.AddInt64(&atomicDigestOps, 1)
	if err != nil {
		return blob.Ref{}, err
	}
	return blob.MustParse(td.Sum()), nil
}
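// A minimal sketch of the same idea without trackDigestReader, assuming only
// the standard library plus blob.Parse (which other code in this section
// already uses). The function name and error text are illustrative, not part
// of the real package. Assumed imports: crypto/sha1, errors, fmt, io, os.
func sha1RefOfFile(path string) (blob.Ref, error) {
	f, err := os.Open(path)
	if err != nil {
		return blob.Ref{}, err
	}
	defer f.Close()
	h := sha1.New()
	if _, err := io.Copy(h, f); err != nil {
		return blob.Ref{}, err
	}
	// Build the "sha1-<hex>" form and parse it back into a blob.Ref.
	ref, ok := blob.Parse(fmt.Sprintf("sha1-%x", h.Sum(nil)))
	if !ok {
		return blob.Ref{}, errors.New("could not parse computed ref")
	}
	return ref, nil
}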
func TestParseFields(t *testing.T) {
	tests := []struct {
		in   string
		want []interface{}
		err  string
	}{
		{in: "5 17", want: []interface{}{uint64(5), uint32(17)}},
		{in: "1", want: []interface{}{uint64(1)}},
		{in: "1", want: []interface{}{int64(1)}},
		{in: "5 sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33 8",
			want: []interface{}{
				int64(5),
				blob.MustParse("sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"),
				uint32(8),
			},
		},
		{in: "-5", want: []interface{}{int64(-5)}, err: "invalid syntax"},
	}
	for i, tt := range tests {
		var gotp []interface{}
		var gotrv []reflect.Value
		for _, wantv := range tt.want {
			rv := reflect.New(reflect.TypeOf(wantv))
			gotrv = append(gotrv, rv)
			gotp = append(gotp, rv.Interface())
		}
		gotErr := ParseFields([]byte(tt.in), gotp...)
		if gotErr != nil && tt.err != "" {
			if strings.Contains(gotErr.Error(), tt.err) {
				continue
			}
			t.Errorf("%d. error = %v; want substring %q", i, gotErr, tt.err)
			continue
		}
		if (gotErr != nil) != (tt.err != "") {
			t.Errorf("%d. error = %v; want substring %q", i, gotErr, tt.err)
			continue
		}
		var got []interface{}
		for _, rv := range gotrv {
			got = append(got, rv.Elem().Interface())
		}
		if !reflect.DeepEqual(got, tt.want) {
			t.Errorf("%d. got = %#v; want %#v", i, got, tt.want)
		}
	}
}
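// Hedged usage sketch for ParseFields, based only on how the test above calls
// it (same package assumed; the field choices mirror one of the test cases):
// each pointer argument is filled, in order, from the space-separated input.
func exampleParseFields() error {
	var (
		size int64
		br   blob.Ref
		n    uint32
	)
	in := []byte("5 sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33 8")
	return ParseFields(in, &size, &br, &n)
}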