func TestGenerateFailsWithShortCredentials(t *testing.T) { cg := HMACStrategy{GlobalSecret: []byte("foo")} challenge, signature, err := cg.Generate() require.NotNil(t, err, "%s", err) require.Empty(t, challenge) require.Empty(t, signature) }
// testStoreFeatures exercises a Store implementation end-to-end: lookups on an
// empty store, feature creation, renaming, and per-environment status updates.
// It is meant to be shared by the concrete store test suites.
func testStoreFeatures(t *testing.T, store Store) {
	// it should fail to get a feature that doesn't exist
	found, err := store.GetFeatureByName("awesome_feature")
	require.Equal(t, ErrNoRows, err)

	// it should get an empty list when there is no features
	list, err := store.ListFeatures()
	require.NoError(t, err)
	require.Empty(t, list)

	// it should create a feature
	feature := &models.Feature{
		Name: "awesome_feature",
	}
	err = store.CreateFeature(feature)
	require.NoError(t, err)
	require.NotEqual(t, feature.ID, 0)
	require.Equal(t, "awesome_feature", feature.Name)
	require.Empty(t, feature.Status)

	// it should update a feature
	feature.Name = "not_awesome_feature"
	err = store.UpdateFeature(feature)
	require.NoError(t, err)

	// it should list all features
	list, err = store.ListFeatures()
	require.NoError(t, err)
	require.Len(t, list, 1)

	// it should get a feature with the status of all environments
	err = store.CreateEnvironment(&models.Environment{
		Name: "staging",
	})
	require.NoError(t, err)
	found, err = store.GetFeatureByName("not_awesome_feature")
	require.NoError(t, err)
	require.Len(t, found.Status, 1)
	// a feature starts out disabled in a newly created environment
	require.Equal(t, false, found.Status["staging"])

	// it should update the status of a feature in an environment
	found.Status["staging"] = true
	err = store.UpdateFeature(found)
	require.NoError(t, err)
	found, err = store.GetFeatureByName("not_awesome_feature")
	require.NoError(t, err)
	require.Equal(t, true, found.Status["staging"])

	// it should list all features with the status on all environments
	list, err = store.ListFeatures()
	require.NoError(t, err)
	require.Len(t, list, 1)
	require.Equal(t, found.ID, list[0].ID)
	require.True(t, list[0].Status["staging"])
}
// If applying a change fails due to a prefix error, changeTargetMeta fails outright
func TestChangeTargetMetaFailsIfPrefixError(t *testing.T) {
	repo, cs, err := testutils.EmptyRepo("docker.com/notary")
	require.NoError(t, err)

	// Create a delegation role whose allowed paths are restricted to "pathprefix".
	newKey, err := cs.Create("targets/level1", "docker.com/notary", data.ED25519Key)
	require.NoError(t, err)
	err = repo.UpdateDelegationKeys("targets/level1", []data.PublicKey{newKey}, []string{}, 1)
	require.NoError(t, err)
	err = repo.UpdateDelegationPaths("targets/level1", []string{"pathprefix"}, []string{}, false)
	require.NoError(t, err)

	// Minimal valid file metadata for the change payload.
	hash := sha256.Sum256([]byte{})
	f := &data.FileMeta{
		Length: 1,
		Hashes: map[string][]byte{
			"sha256": hash[:],
		},
	}
	fjson, err := json.Marshal(f)
	require.NoError(t, err)

	// The change path does not match the delegation's allowed prefix,
	// so applying the change must fail.
	err = changeTargetMeta(repo, &changelist.TUFChange{
		Actn:       changelist.ActionCreate,
		Role:       "targets/level1",
		ChangeType: "target",
		ChangePath: "notPathPrefix",
		Data:       fjson,
	})
	require.Error(t, err)

	// no target in targets or targets/latest
	require.Empty(t, repo.Targets[data.CanonicalTargetsRole].Signed.Targets)
	require.Empty(t, repo.Targets["targets/level1"].Signed.Targets)
}
// TestIngestStoreItemsAreInStorage makes sure that the items we sent off
// to long-term storage in AWS actually made it there.
func TestIngestStoreItemsAreInStorage(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	maxItemsToList := int64(1)
	// s3List lists bucket contents.
	s3List := network.NewS3ObjectList(
		constants.AWSVirginia,
		_context.Config.DPN.DPNPreservationBucket,
		maxItemsToList)
	// s3Head gets metadata about specific objects in S3/Glacier.
	s3Head := network.NewS3Head(_context.Config.APTrustS3Region,
		_context.Config.DPN.DPNPreservationBucket)
	pathToLogFile := filepath.Join(_context.Config.LogDirectory, "dpn_ingest_store.json")
	for _, s3Key := range apt_testutil.INTEGRATION_GOOD_BAGS[0:7] {
		parts := strings.Split(s3Key, "/")
		localTarFileName := parts[1] // APTrust bag name. E.g. "test.edu.test_123.tar"
		// The ingest log maps the local APTrust bag name to its DPN storage URL.
		manifest, err := apt_testutil.FindDPNIngestManifestInLog(pathToLogFile, localTarFileName)
		require.Nil(t, err, "Could not find JSON record for %s", localTarFileName)
		parts = strings.Split(manifest.StorageURL, "/")
		dpnTarFileName := parts[len(parts)-1] // DPN bag name: <uuid>.tar
		// The bag must exist in the DPN preservation bucket.
		s3List.GetList(dpnTarFileName)
		require.Empty(t, s3List.ErrorMessage)
		require.EqualValues(t, 1, len(s3List.Response.Contents),
			"Nothing in S3 for %s", dpnTarFileName)
		obj := s3List.Response.Contents[0]
		assert.Equal(t, dpnTarFileName, *obj.Key)

		// Make sure each item has the expected metadata.
		// s3Head.Response.Metadata is map[string]*string.
		s3Head.Head(dpnTarFileName)
		require.Empty(t, s3Head.ErrorMessage)
		metadata := s3Head.Response.Metadata
		require.NotNil(t, metadata, dpnTarFileName)
		// Notice the Amazon library transforms the first letter of
		// all our keys to upper case. WTF?
		require.NotNil(t, metadata["From_node"], dpnTarFileName)
		require.NotNil(t, metadata["Transfer_id"], dpnTarFileName)
		require.NotNil(t, metadata["Member"], dpnTarFileName)
		require.NotNil(t, metadata["Local_id"], dpnTarFileName)
		require.NotNil(t, metadata["Version"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["From_node"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Transfer_id"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Member"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Local_id"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Version"], dpnTarFileName)
	}
}
func TestDPNBagInfo(t *testing.T) { builder := createBagBuilder(t) defer tearDown() require.NotNil(t, builder) tagfile, err := builder.Bag.TagFile("bag-info.txt") require.Nil(t, err) require.Empty(t, builder.ErrorMessage) require.NotNil(t, tagfile) expected := filepath.Join(builder.LocalPath, "bag-info.txt") assert.Equal(t, expected, tagfile.Name()) assert.True(t, fileutil.FileExists(expected)) verifyTagField(t, tagfile, "Source-Organization", "uc.edu") verifyTagField(t, tagfile, "Organization-Address", "") verifyTagField(t, tagfile, "Contact-Name", "") verifyTagField(t, tagfile, "Contact-Phone", "") verifyTagField(t, tagfile, "Contact-Email", "") verifyTagField(t, tagfile, "Bagging-Date", builder.BagTime()) verifyTagField(t, tagfile, "Bag-Size", "686") verifyTagField(t, tagfile, "Bag-Group-Identifier", "") verifyTagField(t, tagfile, "Bag-Count", "1") // Make sure the bagging date was set. assert.NotEqual(t, builder.BagTime(), "0001-01-01T00:00:00Z") }
func TestMergeSorted5(t *testing.T) { input := []*task.List{ newList([]uint64{}), newList([]uint64{}), } require.Empty(t, MergeSorted(input).Uids) }
func TestDPNInfo(t *testing.T) { builder := createBagBuilder(t) defer tearDown() require.NotNil(t, builder) tagfile, err := builder.Bag.TagFile("dpn-tags/dpn-info.txt") require.Nil(t, err) require.Empty(t, builder.ErrorMessage) require.NotNil(t, tagfile) expected := filepath.Join(builder.LocalPath, "dpn-tags", "dpn-info.txt") assert.Equal(t, expected, tagfile.Name()) assert.True(t, fileutil.FileExists(expected)) verifyTagField(t, tagfile, "DPN-Object-ID", builder.UUID) verifyTagField(t, tagfile, "Local-ID", "uc.edu/cin.675812") verifyTagField(t, tagfile, "First-Node-Name", "APTrust") verifyTagField(t, tagfile, "First-Node-Address", "160 McCormick Rd., Charlottesville, VA 22904") verifyTagField(t, tagfile, "First-Node-Contact-Name", "APTrust Administrator") verifyTagField(t, tagfile, "First-Node-Contact-Email", "*****@*****.**") verifyTagField(t, tagfile, "Version-Number", "1") verifyTagField(t, tagfile, "Previous-Version-Object-ID", "") verifyTagField(t, tagfile, "Interpretive-Object-ID", "") verifyTagField(t, tagfile, "Rights-Object-ID", "") verifyTagField(t, tagfile, "Object-Type", dpn.BAG_TYPE_DATA) }
// TestGenericFileMergeAttributes verifies that MergeAttributes copies ids and
// timestamps from the other file's matching events and checksums, and that
// the merged file's own ids propagate down to its child records.
func TestGenericFileMergeAttributes(t *testing.T) {
	gf1 := testutil.MakeGenericFile(3, 3, "test.edu/file1.txt")
	gf2 := testutil.MakeGenericFile(3, 3, "test.edu/file1.txt")

	// Match up identifiers, so copy assignment will work.
	for i, event := range gf1.PremisEvents {
		gf2.PremisEvents[i].Identifier = event.Identifier
	}
	for i, cs := range gf1.Checksums {
		gf2.Checksums[i].Algorithm = cs.Algorithm
		gf2.Checksums[i].Digest = cs.Digest
	}

	errors := gf1.MergeAttributes(gf2)
	require.Empty(t, errors)

	// Each event should have taken its id and timestamps from the matching
	// event on gf2.
	for i, event := range gf1.PremisEvents {
		assert.Equal(t, gf2.PremisEvents[i].Id, event.Id)
		assert.Equal(t, gf2.PremisEvents[i].CreatedAt, event.CreatedAt)
		assert.Equal(t, gf2.PremisEvents[i].UpdatedAt, event.UpdatedAt)
		// Ids and identifiers should propagate
		assert.Equal(t, gf1.Id, event.GenericFileId)
		assert.Equal(t, gf1.Identifier, event.GenericFileIdentifier)
		assert.Equal(t, gf1.IntellectualObjectId, event.IntellectualObjectId)
		assert.Equal(t, gf1.IntellectualObjectIdentifier, event.IntellectualObjectIdentifier)
	}
	// Same for checksums.
	for i, cs := range gf1.Checksums {
		assert.Equal(t, gf2.Checksums[i].Id, cs.Id)
		assert.Equal(t, gf2.Checksums[i].CreatedAt, cs.CreatedAt)
		assert.Equal(t, gf2.Checksums[i].UpdatedAt, cs.UpdatedAt)
		// Ids should propagate too
		assert.Equal(t, gf1.Id, cs.GenericFileId)
	}
}
// If encryption fails, saving will fail
func TestSaveEncryptionFails(t *testing.T) {
	metadata, entries, snapshot := makeWALData(1, 1)

	tempdir, err := ioutil.TempDir("", "waltests")
	require.NoError(t, err)
	os.RemoveAll(tempdir)
	defer os.RemoveAll(tempdir)

	// fail encrypting one of the entries, but not the first one
	c := NewWALFactory(&meowCrypter{encryptFailures: map[string]struct{}{
		"Entry 3": {},
	}}, nil)
	wrapped, err := c.Create(tempdir, metadata)
	require.NoError(t, err)
	require.NoError(t, wrapped.SaveSnapshot(snapshot))
	err = wrapped.Save(raftpb.HardState{}, entries)
	require.Error(t, err)
	require.Contains(t, err.Error(), "refusing to encrypt")
	require.NoError(t, wrapped.Close())

	// no entries are written at all — Save must be all-or-nothing, so the
	// entries that encrypted successfully must not appear in the WAL either
	ogWAL, err := OriginalWAL.Open(tempdir, snapshot)
	require.NoError(t, err)
	defer ogWAL.Close()
	_, _, ents, err := ogWAL.ReadAll()
	require.NoError(t, err)
	require.Empty(t, ents)
}
// TestItemsAreInLongTermStorage - make sure that each tar file is
// stored in our S3 test storage bucket, with correct metadata.
func TestItemsAreInLongTermStorage(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	localClient, err := network.NewDPNRestClient(
		_context.Config.DPN.RestClient.LocalServiceURL,
		_context.Config.DPN.RestClient.LocalAPIRoot,
		_context.Config.DPN.RestClient.LocalAuthToken,
		_context.Config.DPN.LocalNode,
		_context.Config.DPN)
	require.Nil(t, err)
	// s3List lists bucket contents.
	s3List := apt_network.NewS3ObjectList(
		constants.AWSVirginia,
		_context.Config.DPN.DPNPreservationBucket,
		int64(100),
	)
	// s3Head gets metadata about specific objects in S3/Glacier.
	s3Head := apt_network.NewS3Head(_context.Config.APTrustS3Region,
		_context.Config.DPN.DPNPreservationBucket)
	for _, identifier := range dpn_testutil.BAG_IDS {
		resp := localClient.DPNBagGet(identifier)
		dpnBag := resp.Bag()
		require.NotNil(t, dpnBag)
		if dpnBag.IngestNode == _context.Config.DPN.LocalNode {
			continue // we would not have replicated our own bag
		}
		tarFileName := fmt.Sprintf("%s.tar", identifier)
		s3List.GetList(tarFileName)
		require.NotEmpty(t, s3List.Response.Contents, "%s not found in S3", tarFileName)
		object := s3List.Response.Contents[0]
		// The bag should have been stored just moments ago by this test run.
		fiveMinutesAgo := time.Now().UTC().Add(-5 * time.Minute)
		require.NotNil(t, object.LastModified)
		assert.True(t, object.LastModified.After(fiveMinutesAgo))

		// Make sure each item has the expected metadata.
		// s3Head.Response.Metadata is map[string]*string.
		s3Head.Head(tarFileName)
		require.Empty(t, s3Head.ErrorMessage)
		metadata := s3Head.Response.Metadata
		require.NotNil(t, metadata)
		// Amazon library transforms first letters of keys to CAPS
		require.NotNil(t, metadata["From_node"])
		require.NotNil(t, metadata["Transfer_id"])
		require.NotNil(t, metadata["Member"])
		require.NotNil(t, metadata["Local_id"])
		require.NotNil(t, metadata["Version"])
		assert.NotEmpty(t, *metadata["From_node"])
		assert.NotEmpty(t, *metadata["Transfer_id"])
		assert.NotEmpty(t, *metadata["Member"])
		assert.NotEmpty(t, *metadata["Local_id"])
		assert.NotEmpty(t, *metadata["Version"])
	}
}
func TestIntersectSorted6(t *testing.T) { input := []*task.List{ newList([]uint64{10, 12, 13}), newList([]uint64{2, 3, 4, 13}), newList([]uint64{4, 5, 6}), } require.Empty(t, IntersectSorted(input).Uids) }
func TestAddAdditionalEntriesToContextNoBuildPath(t *testing.T) { ctx := &types.Context{} command := builder.AddAdditionalEntriesToContext{} NoError(t, command.Run(ctx)) require.Empty(t, ctx.PreprocPath) require.Empty(t, ctx.SketchBuildPath) require.Empty(t, ctx.LibrariesBuildPath) require.Empty(t, ctx.CoreBuildPath) require.NotNil(t, ctx.WarningsLevel) require.True(t, ctx.CollectedSourceFiles.Empty()) require.Equal(t, 0, len(ctx.LibrariesResolutionResults)) }
func checkUids(t *testing.T, l *List, uids []uint64) { require.Equal(t, uids, listToArray(t, 0, l)) if len(uids) >= 3 { require.Equal(t, uids[1:], listToArray(t, 10, l), uids[1:]) require.Equal(t, []uint64{81}, listToArray(t, 80, l)) require.Empty(t, listToArray(t, 82, l)) } }
func (suite *MessageStoreTestSuite) TestMessageStore_GetMessages_EmptyStore() { // When messages are retrieved from an empty store messages, err := suite.msgStore.GetMessages(1, 2) require.Nil(suite.T(), err) // Then no messages should be returned require.Empty(suite.T(), messages, "Did not expect messages from empty store") }
func TestNoICU(t *testing.T) { disableICU = true tokenizer, err := NewTokenizer([]byte("hello world")) defer tokenizer.Destroy() require.NotNil(t, tokenizer) require.NoError(t, err) tokens := tokenizer.Tokens() require.Empty(t, tokens) }
// TestMustReceiveEmptyStringIfFilesAreEmpty we must receive empty string if file is empty func TestMustReceiveEmptyStringIfFilesAreEmpty(t *testing.T) { m, err := sentence.NewModule(sentence.ModuleConfig{ NounsFile: "emptyFile.txt", AdjectivesFile: "emptyFile.txt", }) require.Nil(t, err) s := m.GetService().GetRandomSentence(".") require.Empty(t, s) }
// TestShouldNotConsiderAppsWithoutPorts verifies that the generator produces
// no services for Marathon apps whose containers declare no port mappings,
// even when running tasks exist for the app.
func TestShouldNotConsiderAppsWithoutPorts(t *testing.T) {
	// Fake Marathon API: one app with an empty PortMappings list, and one
	// running task belonging to that app.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method == "GET" && r.RequestURI == "/v2/apps" {
			marathonApps := Apps{
				Apps: []App{
					App{
						Id: "/dummy",
						Container: Container{
							Docker: Docker{
								Network:      "BRIDGE",
								PortMappings: []PortMapping{},
							},
						},
					},
				},
			}
			data, err := json.Marshal(marathonApps)
			if err != nil {
				log.Fatal("Error marshalling apps")
			}
			w.Write(data)
			return
		}
		if r.Method == "GET" && r.RequestURI == "/v2/tasks" {
			marathonTasks := Tasks{
				Tasks: []Task{
					Task{AppId: "/dummy", Host: "10.10.10.10", Ports: []int{10001}, ServicePorts: []int{31681}},
				},
			}
			data, err := json.Marshal(marathonTasks)
			if err != nil {
				log.Fatal("Error marshalling tasks")
			}
			w.Write(data)
			return
		}
	}))
	defer ts.Close()

	c := &http.Client{}
	generator := Generator{
		httpClient:      c,
		marathonServers: []string{ts.URL},
	}
	// The app has no port mappings, so it must be filtered out.
	services, _ := generator.Generate()
	require.Empty(t, services)
}
func TestUpdateRequest(t *testing.T) { u, err := url.Parse("https://host.example/prefix/?foo=bar") require.Nil(t, err) client := NewClient(*u) client.DefaultHeaders.Set("X-Header", "123") req := client.NewRequest(t, "POST", "/user", nil) require.Equal(t, "https://host.example/prefix/user?foo=bar", req.URL.String()) require.Empty(t, req.RequestURI) }
func TestLoadPreviousBuildOptionsMapMissingFile(t *testing.T) { ctx := &types.Context{} buildPath := SetupBuildPath(t, ctx) defer os.RemoveAll(buildPath) command := builder.LoadPreviousBuildOptionsMap{} err := command.Run(ctx) NoError(t, err) require.Empty(t, ctx.BuildOptionsJsonPrevious) }
func TestFilter_reject(t *testing.T) { ch := capture.NewHandler() filter := New(ch) filter.Filter(func(e *event.Event) bool { return false }) logEvent := makeEvent(0) filter.Event(logEvent) require.Empty(t, ch.Events()) }
func TestExternalIncludeReplacerPragmaToInclude(t *testing.T) { ctx := &types.Context{} ctx.SourceGccMinusE = sourceWithPragmas ctx.Includes = []string{"/tmp/test184599776/sketch/config.h", "/tmp/test184599776/sketch/includes/de bug.h", "Bridge.h"} replacer := builder.ExternalIncludeReplacer{Source: &ctx.SourceGccMinusE, Target: &ctx.SourceGccMinusE, From: "#pragma ___MY_INCLUDE___ ", To: "#include "} err := replacer.Run(ctx) NoError(t, err) require.Empty(t, ctx.Source) require.Equal(t, sourceWithIncludes, ctx.SourceGccMinusE) }
// TestPeersGarbageCollection checks that garbage collection removes a peer
// only once no remaining peer holds a connection to it.
func TestPeersGarbageCollection(t *testing.T) {
	const (
		peer1NameString = "01:00:00:01:00:00"
		peer2NameString = "02:00:00:02:00:00"
		peer3NameString = "03:00:00:03:00:00"
	)
	var (
		peer1Name, _ = PeerNameFromString(peer1NameString)
		peer2Name, _ = PeerNameFromString(peer2NameString)
		peer3Name, _ = PeerNameFromString(peer3NameString)
	)

	// Create some peers with some connections to each other
	p1, ps1 := newNode(peer1Name)
	p2, ps2 := newNode(peer2Name)
	p3, ps3 := newNode(peer3Name)
	ps1.AddTestConnection(p2)
	ps2.AddTestRemoteConnection(p1, p2)
	ps2.AddTestConnection(p1)
	ps2.AddTestConnection(p3)
	ps3.AddTestConnection(p1)
	ps1.AddTestConnection(p3)
	ps2.AddTestRemoteConnection(p1, p3)
	ps2.AddTestRemoteConnection(p3, p1)

	// Every peer is referenced, so nothing should be dropped
	require.Empty(t, garbageCollect(ps1), "peers removed")
	require.Empty(t, garbageCollect(ps2), "peers removed")
	require.Empty(t, garbageCollect(ps3), "peers removed")

	// Drop the connection from 2 to 3, and 3 isn't garbage-collected
	// because 1 has a connection to 3
	ps2.DeleteTestConnection(p3)
	require.Empty(t, garbageCollect(ps2), "peers removed")

	// Drop the connection from 1 to 3, and 3 will get removed by
	// garbage-collection
	ps1.DeleteTestConnection(p3)
	checkPeerArray(t, garbageCollect(ps1), p3)
}
func TestCreateAndGetChunkedArtifact(t *testing.T) { bucketName := "bucketName" ownerName := "ownerName" artifactName := "artifactName" if testing.Short() { t.Skip("Skipping end-to-end test in short mode.") } client := setup(t) var bucket *Bucket var artifact Artifact var err error bucket, err = client.NewBucket(bucketName, ownerName, 31) require.NotNil(t, bucket) require.NoError(t, err) artifact, err = bucket.NewChunkedArtifact(artifactName) require.NotNil(t, artifact) require.Equal(t, artifactName, artifact.GetArtifactModel().Name) require.Equal(t, model.APPENDING, artifact.GetArtifactModel().State) // require.Equal will crib if the types are not identical. require.Equal(t, int64(0), artifact.GetArtifactModel().Size) require.Equal(t, bucketName, artifact.GetArtifactModel().BucketId) require.Empty(t, artifact.GetArtifactModel().S3URL) require.NoError(t, err) artifact, err = bucket.GetArtifact(artifactName) require.NotNil(t, artifact) require.Equal(t, artifactName, artifact.GetArtifactModel().Name) require.Equal(t, model.APPENDING, artifact.GetArtifactModel().State) // require.Equal will crib if the types are not identical. require.Equal(t, int64(0), artifact.GetArtifactModel().Size) require.Equal(t, bucketName, artifact.GetArtifactModel().BucketId) require.Empty(t, artifact.GetArtifactModel().S3URL) require.NoError(t, err) }
// TestParseVariablesFragments checks that query variables are substituted
// inside fragments and that fragment children are spliced into the query tree.
func TestParseVariablesFragments(t *testing.T) {
	query := `{ "query": "query test($a: int){user(_uid_:0x0a) {...fragmentd,friends(first: $a, offset: $a) {name}}} fragment fragmentd {id(first: $a)}", "variables": {"$a": "5"} }`
	gq, _, err := Parse(query)
	require.NoError(t, err)
	require.NotNil(t, gq)
	// The fragment's "id" child is merged into the user node's children.
	require.Equal(t, childAttrs(gq), []string{"id", "friends"})
	require.Empty(t, childAttrs(gq.Children[0]))
	require.Equal(t, childAttrs(gq.Children[1]), []string{"name"})
	// $a = "5" was substituted into the fragment's argument.
	require.Equal(t, gq.Children[0].Args["first"], "5")
}
// TestGenerateJWT exercises RS256 token generation and validation, covering
// signature tampering, truncated tokens, and the exp and nbf time claims.
func TestGenerateJWT(t *testing.T) {
	claims := &JWTClaims{
		ExpiresAt: time.Now().Add(time.Hour),
	}

	j := RS256JWTStrategy{
		PrivateKey: internal.MustRSAKey(),
	}

	token, sig, err := j.Generate(claims.ToMapClaims(), header)
	require.Nil(t, err, "%s", err)
	require.NotNil(t, token)

	// A freshly generated token validates.
	sig, err = j.Validate(token)
	require.Nil(t, err, "%s", err)

	// Appending extra data invalidates the signature.
	sig, err = j.Validate(token + "." + "0123456789")
	require.NotNil(t, err, "%s", err)

	// A lone signature segment is not a valid token.
	partToken := strings.Split(token, ".")[2]

	sig, err = j.Validate(partToken)
	require.NotNil(t, err, "%s", err)

	// Reset private key
	j.PrivateKey = internal.MustRSAKey()

	// Lets validate the exp claim
	claims = &JWTClaims{
		ExpiresAt: time.Now().Add(-time.Hour),
	}
	token, sig, err = j.Generate(claims.ToMapClaims(), header)
	require.Nil(t, err, "%s", err)
	require.NotNil(t, token)
	//t.Logf("%s.%s", token, sig)
	sig, err = j.Validate(token)
	require.NotNil(t, err, "%s", err)

	// Lets validate the nbf claim
	claims = &JWTClaims{
		NotBefore: time.Now().Add(time.Hour),
	}
	token, sig, err = j.Generate(claims.ToMapClaims(), header)
	require.Nil(t, err, "%s", err)
	require.NotNil(t, token)
	//t.Logf("%s.%s", token, sig)
	// A not-yet-valid token must fail validation and return no signature.
	sig, err = j.Validate(token)
	require.NotNil(t, err, "%s", err)
	require.Empty(t, sig, "%s", err)
}
func TestCreateAndGetStreamedArtifact(t *testing.T) { bucketName := "bucketName" ownerName := "ownerName" artifactName := "artifactName" fileSize := int64(100) if testing.Short() { t.Skip("Skipping end-to-end test in short mode.") } client := setup(t) var bucket *Bucket var artifact Artifact var err error bucket, err = client.NewBucket(bucketName, ownerName, 31) require.NotNil(t, bucket) require.NoError(t, err) artifact, err = bucket.NewStreamedArtifact(artifactName, fileSize) require.NotNil(t, artifact) require.Equal(t, artifactName, artifact.GetArtifactModel().Name) require.Equal(t, model.WAITING_FOR_UPLOAD, artifact.GetArtifactModel().State) require.Equal(t, fileSize, artifact.GetArtifactModel().Size) require.Equal(t, bucketName, artifact.GetArtifactModel().BucketId) require.Empty(t, artifact.GetArtifactModel().S3URL) require.NoError(t, err) artifact, err = bucket.GetArtifact(artifactName) require.NotNil(t, artifact) require.Equal(t, artifactName, artifact.GetArtifactModel().Name) require.Equal(t, model.WAITING_FOR_UPLOAD, artifact.GetArtifactModel().State) require.Equal(t, fileSize, artifact.GetArtifactModel().Size) require.Equal(t, bucketName, artifact.GetArtifactModel().BucketId) require.Empty(t, artifact.GetArtifactModel().S3URL) require.NoError(t, err) }
func TestStringsParam(t *testing.T) { into := []string{} param := newStringsValue(&into, nil) param.Set("a") param.Set("b") require.Equal(t, []string{"a", "b"}, into) require.Equal(t, `["a", "b"]`, param.String()) param.Clear() require.Empty(t, into) }
// TestHead issues a HEAD request for a known test file in S3 and checks the
// returned content length, ETag and content type.
func TestHead(t *testing.T) {
	// canTestS3, testBucket, testFile, testFileSize,
	// and testFileETag are defined in s3_download_test
	if !canTestS3() {
		return
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	client := network.NewS3Head(_context.Config.APTrustS3Region, testBucket)
	client.Head(testFile)
	require.Empty(t, client.ErrorMessage)
	assert.EqualValues(t, testFileSize, *client.Response.ContentLength)
	assert.Equal(t, testFileETag, *client.Response.ETag)
	assert.Equal(t, "application/x-tar", *client.Response.ContentType)
}
func TestAddContacts(t *testing.T) { router := mux.NewRouter() router.HandleFunc("/", AddContact(&mockedContactList)).Methods("POST") router.HandleFunc("/", ListContacts(&mockedContactList)).Methods("GET") ts := httptest.NewServer(router) // We are expecting that our new contact is now in the list of contacts. expectContactList := append(mockedContactList, mockContact) _, body, errs := gorequest.New().Post(ts.URL).SendStruct(mockContact).End() require.Len(t, errs, 0) require.Empty(t, body) // Like in TestListContacts, this helper function makes an http request to ListContacts and validates its output. fetchAndTestContactList(t, ts, expectContactList) }
// TestParse_pass1 parses a simple query and checks the resulting tree: the
// "friends" child carries only an argument plus a comment, so it has no
// children of its own.
func TestParse_pass1(t *testing.T) {
	query := `
	{
		me(_uid_:0x0a) {
			name,
			friends(xid:what) {  # xid would be ignored.
			}
		}
	}
`
	gq, _, err := Parse(query)
	require.NoError(t, err)
	require.NotNil(t, gq)
	require.Equal(t, childAttrs(gq), []string{"name", "friends"})
	// friends has no selected attributes, only the (ignored) xid argument.
	require.Empty(t, childAttrs(gq.Children[1]))
}