// TestReplicationsMarkedAsStored - make sure that the ReplicationTransfer
// records are marked with stored = true on the remote nodes.
func TestReplicationsMarkedAsStored(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")

	// Get a list of ReplicationTransfers where our local node
	// is the ToNode, and make sure we marked them all as stored
	// on the FromNode.
	localClient, err := network.NewDPNRestClient(
		_context.Config.DPN.RestClient.LocalServiceURL,
		_context.Config.DPN.RestClient.LocalAPIRoot,
		_context.Config.DPN.RestClient.LocalAuthToken,
		_context.Config.DPN.LocalNode,
		_context.Config.DPN)
	require.Nil(t, err)
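	// GetRemoteClients returns a map keyed by node namespace (e.g. "chron"),
	// with a DPN REST client for each remote node. We use it below to query
	// each transfer's FromNode.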
	remoteClients, err := localClient.GetRemoteClients()
	require.Nil(t, err)

	xferParams := url.Values{}
	xferParams.Set("to_node", _context.Config.DPN.LocalNode)
	dpnResp := localClient.ReplicationTransferList(xferParams)
	require.Nil(t, dpnResp.Error)
	for _, xfer := range dpnResp.ReplicationTransfers() {
		remoteClient := remoteClients[xfer.FromNode]
		require.NotNil(t, remoteClient)
		resp := remoteClient.ReplicationTransferGet(xfer.ReplicationId)
		require.Nil(t, resp.Error)
		remoteXfer := resp.ReplicationTransfer()
		require.NotNil(t, remoteXfer)
		assert.True(t, remoteXfer.Stored)
	}
}

// TestItemsAreInLongTermStorage - make sure that each tar file is
// stored in our S3 test storage bucket, with correct metadata.
func TestItemsAreInLongTermStorage(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")

	localClient, err := network.NewDPNRestClient(
		_context.Config.DPN.RestClient.LocalServiceURL,
		_context.Config.DPN.RestClient.LocalAPIRoot,
		_context.Config.DPN.RestClient.LocalAuthToken,
		_context.Config.DPN.LocalNode,
		_context.Config.DPN)
	require.Nil(t, err)

	// s3List lists bucket contents.
	s3List := apt_network.NewS3ObjectList(
		constants.AWSVirginia,
		_context.Config.DPN.DPNPreservationBucket,
		int64(100),
	)
	// s3Head gets metadata about specific objects in S3/Glacier.
	s3Head := apt_network.NewS3Head(_context.Config.APTrustS3Region,
		_context.Config.DPN.DPNPreservationBucket)

	for _, identifier := range dpn_testutil.BAG_IDS {
		resp := localClient.DPNBagGet(identifier)
		dpnBag := resp.Bag()
		require.NotNil(t, dpnBag)
		if dpnBag.IngestNode == _context.Config.DPN.LocalNode {
			continue // we would not have replicated our own bag
		}
		tarFileName := fmt.Sprintf("%s.tar", identifier)
		s3List.GetList(tarFileName)
		require.NotEmpty(t, s3List.Response.Contents, "%s not found in S3", tarFileName)
		object := s3List.Response.Contents[0]
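		// The tar file should have been written during this test run, so its
		// LastModified timestamp should fall within the last few minutes.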
		fiveMinutesAgo := time.Now().UTC().Add(-5 * time.Minute)
		require.NotNil(t, object.LastModified)
		assert.True(t, object.LastModified.After(fiveMinutesAgo))

		// Make sure each item has the expected metadata.
		// s3Head.Response.Metadata is map[string]*string.
		s3Head.Head(tarFileName)
		require.Empty(t, s3Head.ErrorMessage)
		metadata := s3Head.Response.Metadata
		require.NotNil(t, metadata)
		// The AWS SDK capitalizes the first letter of each metadata key.
		require.NotNil(t, metadata["From_node"])
		require.NotNil(t, metadata["Transfer_id"])
		require.NotNil(t, metadata["Member"])
		require.NotNil(t, metadata["Local_id"])
		require.NotNil(t, metadata["Version"])

		assert.NotEmpty(t, *metadata["From_node"])
		assert.NotEmpty(t, *metadata["Transfer_id"])
		assert.NotEmpty(t, *metadata["Member"])
		assert.NotEmpty(t, *metadata["Local_id"])
		assert.NotEmpty(t, *metadata["Version"])
	}
}
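
// metaValue is a hypothetical helper (not used by the tests above), sketched
// here only to illustrate one way to read the S3 metadata map without
// depending on the SDK's capitalization of the first letter of each key.
// It assumes the map[string]*string shape seen in s3Head.Response.Metadata.
func metaValue(metadata map[string]*string, key string) string {
	if key == "" {
		return ""
	}
	// Try the key as given, then with its first letter upper-cased
	// (e.g. "from_node" and "From_node").
	for _, k := range []string{key, strings.ToUpper(key[:1]) + key[1:]} {
		if v, ok := metadata[k]; ok && v != nil {
			return *v
		}
	}
	return ""
}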

// TestIngestStoreTarFilesDeleted tests whether all expected DPN bags
// (tar files) have been deleted from the staging area.
func TestIngestStoreTarFilesDeleted(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err)
	pattern := filepath.Join(_context.Config.DPN.StagingDirectory, "test.edu", "*.tar")
	files, err := filepath.Glob(pattern)
	require.Nil(t, err)
	assert.Equal(t, 0, len(files))
}

// TestIngestStoreItemsAreInStorage makes sure that the items we sent off
// to long-term storage in AWS actually made it there.
func TestIngestStoreItemsAreInStorage(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	maxItemsToList := int64(1)
	// s3List lists bucket contents.
	s3List := network.NewS3ObjectList(
		constants.AWSVirginia,
		_context.Config.DPN.DPNPreservationBucket,
		maxItemsToList)
	// s3Head gets metadata about specific objects in S3/Glacier.
	s3Head := network.NewS3Head(_context.Config.APTrustS3Region,
		_context.Config.DPN.DPNPreservationBucket)

	pathToLogFile := filepath.Join(_context.Config.LogDirectory, "dpn_ingest_store.json")
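	// dpn_ingest_store writes one JSON ingest manifest per bag to this log.
	// Each manifest's StorageURL tells us which DPN tar file (<uuid>.tar)
	// to look for in S3.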
	for _, s3Key := range apt_testutil.INTEGRATION_GOOD_BAGS[0:7] {
		parts := strings.Split(s3Key, "/")
		localTarFileName := parts[1] // APTrust bag name. E.g. "test.edu.test_123.tar"
		manifest, err := apt_testutil.FindDPNIngestManifestInLog(pathToLogFile, localTarFileName)
		require.Nil(t, err, "Could not find JSON record for %s", localTarFileName)
		parts = strings.Split(manifest.StorageURL, "/")
		dpnTarFileName := parts[len(parts)-1] // DPN bag name: <uuid>.tar
		s3List.GetList(dpnTarFileName)
		require.Empty(t, s3List.ErrorMessage)
		require.EqualValues(t, 1, len(s3List.Response.Contents), "Nothing in S3 for %s", dpnTarFileName)
		obj := s3List.Response.Contents[0]
		assert.Equal(t, dpnTarFileName, *obj.Key)

		// Make sure each item has the expected metadata.
		// s3Head.Response.Metadata is map[string]*string.
		s3Head.Head(dpnTarFileName)
		require.Empty(t, s3Head.ErrorMessage)
		metadata := s3Head.Response.Metadata
		require.NotNil(t, metadata, dpnTarFileName)
		// Note that the AWS SDK capitalizes the first letter of each
		// of our metadata keys.
		require.NotNil(t, metadata["From_node"], dpnTarFileName)
		require.NotNil(t, metadata["Transfer_id"], dpnTarFileName)
		require.NotNil(t, metadata["Member"], dpnTarFileName)
		require.NotNil(t, metadata["Local_id"], dpnTarFileName)
		require.NotNil(t, metadata["Version"], dpnTarFileName)

		assert.NotEmpty(t, *metadata["From_node"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Transfer_id"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Member"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Local_id"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Version"], dpnTarFileName)
	}
}

func TestItemsCopiedToStaging(t *testing.T) {
	// Make sure that each of the expected bags has shown
	// up in our test staging area.
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	for i := 2; i <= 5; i++ {
		filename := fmt.Sprintf("00000000-0000-4000-a000-00000000000%d.tar", i)
		path := filepath.Join(_context.Config.DPN.StagingDirectory, filename)
		assert.True(t, fileutil.FileExists(path), "File %s was not copied", path)
	}
}

// TestIngestStoreWorkItems checks to see if Pharos WorkItems
// were updated correctly.
func TestIngestStoreWorkItems(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_, workItems, err := dpn_testutil.GetDPNWorkItems()
	require.Nil(t, err)
	for _, item := range workItems {
		assert.Equal(t, constants.StageRecord, item.Stage)
		assert.Equal(t, constants.StatusPending, item.Status)
		assert.Equal(t, "Bag copied to long-term storage", item.Note)
		assert.Equal(t, "", item.Node)
		assert.Equal(t, 0, item.Pid)
		assert.True(t, item.Retry)
	}
}

// TestIngestRecordWorkItems checks to see if Pharos WorkItems
// were updated correctly.
func TestIngestRecordWorkItems(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_, workItems, err := dpn_testutil.GetDPNWorkItems()
	require.Nil(t, err)
	for _, item := range workItems {
		assert.Equal(t, constants.StageResolve, item.Stage)
		assert.Equal(t, constants.StatusSuccess, item.Status)
		assert.Equal(t, "DPN ingest complete", item.Note)
		assert.Equal(t, "", item.Node)
		assert.Equal(t, 0, item.Pid)
		assert.True(t, item.Retry)
	}
}

func TestInstitutionsCached(t *testing.T) {
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	expected, actual := getBucketReaderOutputs(t)
	testInstCache(t, expected, actual)
	testWorkItemsCached(t, expected, actual)
	testWorkItemsFetched(t, expected, actual)
	testWorkItemsCreated(t, expected, actual)
	testWorkItemsQueued(t, expected, actual)
	testWorkItemsMarkedAsQueued(t, expected, actual)
	testS3Items(t, expected, actual)
	testErrors(t, expected, actual)
	testWarnings(t, expected, actual)
}

// TestIngestRecordJsonLog checks that all expected entries are present
// in the dpn_ingest_record.json log.
func TestIngestRecordJsonLog(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err)
	pathToLogFile := filepath.Join(_context.Config.LogDirectory, "dpn_ingest_record.json")
	for _, s3Key := range apt_testutil.INTEGRATION_GOOD_BAGS[0:7] {
		parts := strings.Split(s3Key, "/")
		tarFileName := parts[1]
		manifest, err := apt_testutil.FindDPNIngestManifestInLog(pathToLogFile, tarFileName)
		require.Nil(t, err)
		require.NotNil(t, manifest)
		detail := fmt.Sprintf("%s from JSON log", tarFileName)
		testIngestRecordManifest(t, _context, manifest, detail)
	}
}

// TestInteObjDPNUUID makes sure the DPN UUID was set on the
// IntellectualObject record in Pharos.
func TestInteObjDPNUUID(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err)
	for _, s3Key := range apt_testutil.INTEGRATION_GOOD_BAGS[0:7] {
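		// The S3 key looks like "aptrust.receiving.test.test.edu/<bag>.tar".
		// Strip the receiving-bucket prefix and the ".tar" extension to get
		// the IntellectualObject identifier used by Pharos.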
		tar := strings.Replace(s3Key, "aptrust.receiving.test.", "", 1)
		objIdentifier := strings.Replace(tar, ".tar", "", 1)
		resp := _context.PharosClient.IntellectualObjectGet(objIdentifier, false, false)
		require.Nil(t, resp.Error)
		obj := resp.IntellectualObject()
		require.NotNil(t, obj)
		// DPNUUID is null in fixture data. It should be set after DPN ingest.
		assert.True(t, util.LooksLikeUUID(obj.DPNUUID))
	}
}

// TestPackageCleanup checks to see whether dpn_package cleaned up
// all of the intermediate files created during the bag building
// process. Those are directories containing untarred bags.
func TestPackageCleanup(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err)
	pattern := filepath.Join(_context.Config.DPN.StagingDirectory, "test.edu", "*")
	files, err := filepath.Glob(pattern)
	require.Nil(t, err)

	// Only the 7 tar files should remain. The 7 working directories
	// should have been deleted. If anything other than a tar file
	// remains, some part of cleanup failed.
	assert.Equal(t, 7, len(files))
	for _, file := range files {
		assert.True(t, strings.HasSuffix(file, ".tar"))
	}
}

// TestIngestStoreItemsQueued checks to see if dpn_ingest_store pushed items
// into the dpn_ingest_record NSQ topic.
func TestIngestStoreItemsQueued(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	stats, err := _context.NSQClient.GetStats()
	require.Nil(t, err)
	foundTopic := false
	for _, topic := range stats.Data.Topics {
		if topic.TopicName == _context.Config.DPN.DPNIngestRecordWorker.NsqTopic {
			// All 7 stored bags should show up in the record queue
			foundTopic = true
			assert.EqualValues(t, uint64(7), topic.MessageCount)
		}
	}
	assert.True(t, foundTopic, "Nothing was queued in %s",
		_context.Config.DPN.DPNIngestRecordWorker.NsqTopic)
}

// TestPushToDPN is for integration testing only. It creates a few
// WorkItems in Pharos asking that a handful of bags be pushed to DPN.
// Subsequent integration tests (such as dpn_queue) depend on the
// WorkItems created by this test.
func TestPushToDPN(t *testing.T) {
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	configFile := filepath.Join("config", "integration.json")
	config, err := models.LoadConfigFile(configFile)
	require.Nil(t, err)
	_context := context.NewContext(config)
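	// Ask Pharos to push the first seven good bags from the integration
	// fixture set to DPN. Later tests expect exactly seven DPN ingest items.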
	for _, s3Key := range testutil.INTEGRATION_GOOD_BAGS[0:7] {
		identifier := strings.Replace(s3Key, "aptrust.receiving.test.", "", 1)
		identifier = strings.Replace(identifier, ".tar", "", 1)
		resp := _context.PharosClient.IntellectualObjectPushToDPN(identifier)
		workItem := resp.WorkItem()
		require.Nil(t, resp.Error)
		require.NotNil(t, workItem)
		_context.MessageLog.Info("Created DPN work item #%d for %s",
			workItem.Id, workItem.ObjectIdentifier)
	}
}

func TestValidItemInStorageQueue(t *testing.T) {
	// Make sure the copied DPN bags made it into the validation queue.
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	stats, err := _context.NSQClient.GetStats()
	require.Nil(t, err)
	foundTopic := false
	for _, topic := range stats.Data.Topics {
		if topic.TopicName == _context.Config.DPN.DPNReplicationStoreWorker.NsqTopic {
			// All 4 of the valid bags should appear in the store queue.
			foundTopic = true
			assert.EqualValues(t, uint64(4), topic.MessageCount)
		}
	}
	assert.True(t, foundTopic, "Nothing was queued in %s",
		_context.Config.DPN.DPNReplicationStoreWorker.NsqTopic)
}

// TestIngestRecordWorkItemState checks to see if Pharos WorkItemState
// records were updated correctly.
func TestIngestRecordWorkItemState(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	// Note: GetDPNWorkItems actually retrieves DPN-related Pharos WorkItems, not DPNWorkItems.
	_context, workItems, err := dpn_testutil.GetDPNWorkItems()
	require.Nil(t, err)
	for _, item := range workItems {
		require.NotNil(t, item.WorkItemStateId)
		resp := _context.PharosClient.WorkItemStateGet(*item.WorkItemStateId)
		require.Nil(t, resp.Error)
		workItemState := resp.WorkItemState()
		require.NotNil(t, workItemState)
		assert.Equal(t, constants.ActionDPN, workItemState.Action)
		assert.False(t, workItemState.CreatedAt.IsZero())
		assert.False(t, workItemState.UpdatedAt.IsZero())
		detail := fmt.Sprintf("%s from Pharos", item.ObjectIdentifier)
		testIngestRecordWorkItemState(t, _context, workItemState.State, detail)
	}
}

func TestCopyResultSentToRemoteNodes(t *testing.T) {
	// Query the FromNode of each copied bag to make sure that
	// we sent a fixity value back to the ingest node.
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	dpnClient, err := network.NewDPNRestClient(
		_context.Config.DPN.RestClient.LocalServiceURL,
		_context.Config.DPN.RestClient.LocalAPIRoot,
		_context.Config.DPN.RestClient.LocalAuthToken,
		_context.Config.DPN.LocalNode,
		_context.Config.DPN)
	require.Nil(t, err, "Couldn't build DPN REST client: %v", err)

	remoteClients, err := dpnClient.GetRemoteClients()
	require.Nil(t, err, "Couldn't build remote DPN clients: %v", err)

	// These identifiers are in the fixture data for dpn-server.
	// Key is the FromNode, value is the ReplicationId
	xferIdentifiers := map[string]string{
		"chron": "20000000-0000-4000-a000-000000000007",
		"hathi": "30000000-0000-4000-a000-000000000001",
		"sdr":   "40000000-0000-4000-a000-000000000013",
		"tdr":   "50000000-0000-4000-a000-000000000019",
	}

	for fromNode, identifier := range xferIdentifiers {
		client := remoteClients[fromNode]
		require.NotNil(t, client, "No DPN REST client for %s", fromNode)
		resp := client.ReplicationTransferGet(identifier)
		require.Nil(t, resp.Error)
		xfer := resp.ReplicationTransfer()
		require.NotNil(t, xfer, "ReplicationTransfer %s is missing", identifier)
		assert.NotEmpty(t, xfer.FixityValue, "Empty fixity for %s", identifier)
		assert.True(t, xfer.StoreRequested, "StoreRequested should not be false for %s", identifier)
	}
}

// TestDPNWorkItemsCompleted - make sure DPNWorkItems are marked as
// completed in Pharos.
func TestDPNWorkItemsCompleted(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	params := url.Values{}
	params.Set("task", "replication")
	params.Set("page", "1")
	params.Set("per_page", "100")
	resp := _context.PharosClient.DPNWorkItemList(params)
	require.Nil(t, resp.Error)
	for _, item := range resp.DPNWorkItems() {
		require.NotNil(t, item.QueuedAt, "QueuedAt nil for %s", item.Identifier)
		require.NotNil(t, item.QueuedAt, "CompletedAt nil for %s", item.Identifier)
		require.NotNil(t, item.State, "State nil for %s", item.Identifier)
		assert.False(t, item.QueuedAt.IsZero(), "QueuedAt is zero for %s", item.Identifier)
		assert.False(t, item.CompletedAt.IsZero(), "CompletedAt is zero for %s", item.Identifier)
		assert.Equal(t, "Bag copied to long-term storage", *item.Note, "Note is incorrect for %s", item.Identifier)
		assert.True(t, len(*item.State) > 250, "State JSON too short for %s", item.Identifier)
	}
}

func TestFetchResults(t *testing.T) {
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	// Load config
	configFile := filepath.Join("config", "integration.json")
	config, err := models.LoadConfigFile(configFile)
	require.Nil(t, err)
	config.ExpandFilePaths()

	// Find the log file that apt_fetch created when it was running
	// with the "config/integration.json" config options. We'll read
	// that file.
	pathToJsonLog := filepath.Join(config.LogDirectory, "apt_fetch.json")
	for _, bagName := range testutil.INTEGRATION_GOOD_BAGS {
		ingestManifest, err := testutil.FindIngestManifestInLog(pathToJsonLog, bagName)
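		// Use assert rather than require here so that one missing manifest
		// doesn't abort the checks for the remaining bags.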
		assert.Nil(t, err)
		if err != nil {
			continue
		}
		fetcherTestCommon(t, bagName, ingestManifest)
		// TODO: Test WorkItem (stage, status, etc.) below
		fetcherTestGoodBagResult(t, bagName, ingestManifest)
		if bagName == "aptrust.receiving.test.test.edu/example.edu.tagsample_good.tar" {
			fetcherTestSpecifics(t, ingestManifest)
		}
	}
	for _, bagName := range testutil.INTEGRATION_BAD_BAGS {
		ingestManifest, err := testutil.FindIngestManifestInLog(pathToJsonLog, bagName)
		assert.Nil(t, err)
		if err != nil {
			continue
		}
		fetcherTestCommon(t, bagName, ingestManifest)
		// TODO: Test WorkItem (stage, status, etc.) below
		fetcherTestBadBagResult(t, bagName, ingestManifest)
	}
}

func TestValidationDPNWorkItems(t *testing.T) {
	// Make sure that our code updated the DPNWorkItem record for each
	// ReplicationTransfer.
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	params := url.Values{}
	params.Set("task", "replication")
	params.Set("page", "1")
	params.Set("per_page", "100")
	resp := _context.PharosClient.DPNWorkItemList(params)
	require.Nil(t, resp.Error)
	for _, item := range resp.DPNWorkItems() {
		require.NotNil(t, item.QueuedAt, "QueuedAt nil for %s", item.Identifier)
		require.NotNil(t, item.Note, "Note nil for %s", item.Identifier)
		require.NotNil(t, item.State, "State nil for %s", item.Identifier)
		assert.False(t, item.QueuedAt.IsZero(), "QueuedAt is zero for %s", item.Identifier)
		assert.Equal(t, "Bag passed validation", *item.Note, "Note is incorrect for %s", item.Identifier)
		assert.True(t, len(*item.State) > 250, "State JSON too short for %s", item.Identifier)
	}
}

func TestStoreResults(t *testing.T) {
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	// Load config
	configFile := filepath.Join("config", "integration.json")
	config, err := models.LoadConfigFile(configFile)
	require.Nil(t, err)
	config.ExpandFilePaths()

	// Find the log file that apt_store created when it was running
	// with the "config/integration.json" config options. We'll read
	// that file.
	pathToJsonLog := filepath.Join(config.LogDirectory, "apt_store.json")
	for _, bagName := range testutil.INTEGRATION_GOOD_BAGS {
		ingestManifest, err := testutil.FindIngestManifestInLog(pathToJsonLog, bagName)
		assert.Nil(t, err)
		if err != nil {
			continue
		}
		// TODO: Test WorkItem (stage, status, etc.) below
		storeTestCommon(t, bagName, ingestManifest, config)
	}
}

// TestWorkItemsCreatedAndQueued verifies that we created one WorkItem
// for each DPN ingest request.
func TestWorkItemsCreatedAndQueued(t *testing.T) {
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	expectedIdentifiers := identifiersPushedToDPN()
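	// expectedIdentifiers holds the object identifiers that were pushed to
	// DPN (see TestPushToDPN); each should now have a corresponding WorkItem.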
	_context, err := testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	params := url.Values{}
	params.Set("item_action", "DPN")
	params.Set("page", "1")
	params.Set("per_page", "100")
	resp := _context.PharosClient.WorkItemList(params)
	require.Nil(t, resp.Error)
	assert.Equal(t, len(expectedIdentifiers), resp.Count)
	for _, workItem := range resp.WorkItems() {
		found := false
		queued := false
		currentIdentifier := ""
		for _, identifier := range expectedIdentifiers {
			currentIdentifier = identifier
			if workItem.ObjectIdentifier == identifier {
				found = true
				if workItem.QueuedAt != nil && !workItem.QueuedAt.IsZero() {
					queued = true
				}
				break
			}
		}
		assert.True(t, found, "No WorkItem for object %s", currentIdentifier)
		assert.True(t, queued, "Object %s was not queued", currentIdentifier)
	}

	// In addition to checking whether Pharos thinks the items are queued,
	// let's ask NSQ as well.
	stats, err := _context.NSQClient.GetStats()
	require.Nil(t, err)
	foundFetchTopic := false
	foundStoreTopic := false
	foundRecordTopic := false
	for _, topic := range stats.Data.Topics {
		if topic.TopicName == _context.Config.FetchWorker.NsqTopic {
			// We fetch 16 bags in our integration tests.
			// They're not all valid, but we should have that many in the queue.
			foundFetchTopic = true
			assert.EqualValues(t, uint64(16), topic.MessageCount)
		} else if topic.TopicName == _context.Config.StoreWorker.NsqTopic {
			// All of the 11 valid bags should have made it into the store topic.
			foundStoreTopic = true
			assert.EqualValues(t, uint64(11), topic.MessageCount)
		} else if topic.TopicName == _context.Config.RecordWorker.NsqTopic {
			// All of the 11 valid bags should have made it into the record topic.
			foundRecordTopic = true
			assert.EqualValues(t, uint64(11), topic.MessageCount)
		}
	}
	assert.True(t, foundFetchTopic, "Nothing was queued in %s",
		_context.Config.FetchWorker.NsqTopic)
	assert.True(t, foundStoreTopic, "Nothing was queued in %s",
		_context.Config.StoreWorker.NsqTopic)
	assert.True(t, foundRecordTopic, "Nothing was queued in %s",
		_context.Config.RecordWorker.NsqTopic)
}

// TestDPNWorkItemsCreatedAndQueued verifies that we created one DPNWorkItem
// for each replication and restore request that we synched from the
// other nodes.
func TestDPNWorkItemsCreatedAndQueued(t *testing.T) {
	if !testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	// Use local DPN REST client to check for all replication requests
	// where ToNode is our LocalNode.
	//
	// Then check Pharos for a DPNWorkItem for each of these replications.
	// The DPNWorkItem should exist, and should have a QueuedAt timestamp.
	_context, err := testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")

	// Check DPNWorkItems for ReplicationTransfers
	dpnClient, err := network.NewDPNRestClient(
		_context.Config.DPN.RestClient.LocalServiceURL,
		_context.Config.DPN.RestClient.LocalAPIRoot,
		_context.Config.DPN.RestClient.LocalAuthToken,
		_context.Config.DPN.LocalNode,
		_context.Config.DPN)
	require.Nil(t, err)
	xferParams := url.Values{}
	xferParams.Set("to_node", _context.Config.DPN.LocalNode)
	dpnResp := dpnClient.ReplicationTransferList(xferParams)
	require.Nil(t, dpnResp.Error)
	for _, xfer := range dpnResp.ReplicationTransfers() {
		params := url.Values{}
		params.Set("identifier", xfer.ReplicationId)
		params.Set("task", "replication")
		pharosResp := _context.PharosClient.DPNWorkItemList(params)
		require.Nil(t, pharosResp.Error)
		require.Equal(t, 1, pharosResp.Count)
		dpnWorkItem := pharosResp.DPNWorkItem()
		require.NotNil(t, dpnWorkItem.QueuedAt)
		assert.False(t, dpnWorkItem.QueuedAt.IsZero())
		assert.Equal(t, xfer.FromNode, dpnWorkItem.RemoteNode)
	}

	// Check DPNWorkItems for RestoreTransfers. Use fresh params here so the
	// "to_node" filter from the replication query above doesn't carry over.
	xferParams = url.Values{}
	xferParams.Set("from_node", _context.Config.DPN.LocalNode)
	dpnResp = dpnClient.RestoreTransferList(xferParams)
	require.Nil(t, dpnResp.Error)
	for _, xfer := range dpnResp.RestoreTransfers() {
		params := url.Values{}
		params.Set("identifier", xfer.RestoreId)
		params.Set("task", "restore")
		pharosResp := _context.PharosClient.DPNWorkItemList(params)
		require.Nil(t, pharosResp.Error)
		require.Equal(t, 1, pharosResp.Count)
		dpnWorkItem := pharosResp.DPNWorkItem()
		require.NotNil(t, dpnWorkItem.QueuedAt)
		assert.False(t, dpnWorkItem.QueuedAt.IsZero())
		assert.Equal(t, xfer.ToNode, dpnWorkItem.RemoteNode)
	}

	// Check NSQ as well.
	stats, err := _context.NSQClient.GetStats()
	require.Nil(t, err)
	foundPackageTopic := false
	foundCopyTopic := false
	foundRestoreTopic := false
	for _, topic := range stats.Data.Topics {
		if topic.TopicName == _context.Config.DPN.DPNPackageWorker.NsqTopic {
			// apps/test_push_to_dpn.go requests that items
			// testutil.INTEGRATION_GOOD_BAGS[0:7] be sent to DPN,
			// so we should find seven items in the package queue
			foundPackageTopic = true
			assert.EqualValues(t, uint64(7), topic.MessageCount)
		} else if topic.TopicName == _context.Config.DPN.DPNCopyWorker.NsqTopic {
			// Fixture data has 4 replications: one from each remote node
			foundCopyTopic = true
			assert.EqualValues(t, uint64(4), topic.MessageCount)
		} else if topic.TopicName == _context.Config.DPN.DPNRestoreWorker.NsqTopic {
			// Fixture data has 4 restores: one from each remote node
			foundRestoreTopic = true
			assert.EqualValues(t, uint64(4), topic.MessageCount)
		}
	}
	assert.True(t, foundPackageTopic, "Nothing was queued in %s",
		_context.Config.DPN.DPNPackageWorker.NsqTopic)
	assert.True(t, foundCopyTopic, "Nothing was queued in %s",
		_context.Config.DPN.DPNCopyWorker.NsqTopic)
	assert.True(t, foundRestoreTopic, "Nothing was queued in %s",
		_context.Config.DPN.DPNRestoreWorker.NsqTopic)
}