// TestItemsAreInLongTermStorage - make sure that each tar file is
// stored in our S3 test storage bucket, with correct metadata.
func TestItemsAreInLongTermStorage(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")

	localClient, err := network.NewDPNRestClient(
		_context.Config.DPN.RestClient.LocalServiceURL,
		_context.Config.DPN.RestClient.LocalAPIRoot,
		_context.Config.DPN.RestClient.LocalAuthToken,
		_context.Config.DPN.LocalNode,
		_context.Config.DPN)
	require.Nil(t, err)

	// s3List lists bucket contents.
	s3List := apt_network.NewS3ObjectList(
		constants.AWSVirginia,
		_context.Config.DPN.DPNPreservationBucket,
		int64(100),
	)
	// s3Head gets metadata about specific objects in S3/Glacier.
	s3Head := apt_network.NewS3Head(_context.Config.APTrustS3Region,
		_context.Config.DPN.DPNPreservationBucket)

	for _, identifier := range dpn_testutil.BAG_IDS {
		resp := localClient.DPNBagGet(identifier)
		dpnBag := resp.Bag()
		require.NotNil(t, dpnBag)
		if dpnBag.IngestNode == _context.Config.DPN.LocalNode {
			continue // we would not have replicated our own bag
		}
		tarFileName := fmt.Sprintf("%s.tar", identifier)
		s3List.GetList(tarFileName)
		require.NotEmpty(t, s3List.Response.Contents, "%s not found in S3", tarFileName)
		object := s3List.Response.Contents[0]
		fiveMinutesAgo := time.Now().UTC().Add(-5 * time.Minute)
		require.NotNil(t, object.LastModified)
		assert.True(t, object.LastModified.After(fiveMinutesAgo))

		// Make sure each item has the expected metadata.
		// s3Head.Response.Metadata is map[string]*string.
		s3Head.Head(tarFileName)
		require.Empty(t, s3Head.ErrorMessage)
		metadata := s3Head.Response.Metadata
		require.NotNil(t, metadata)
		// The AWS SDK capitalizes the first letter of each metadata key,
		// so we look up "From_node" rather than "from_node" (see the
		// lookup helper sketched after this test).
		require.NotNil(t, metadata["From_node"])
		require.NotNil(t, metadata["Transfer_id"])
		require.NotNil(t, metadata["Member"])
		require.NotNil(t, metadata["Local_id"])
		require.NotNil(t, metadata["Version"])

		assert.NotEmpty(t, *metadata["From_node"])
		assert.NotEmpty(t, *metadata["Transfer_id"])
		assert.NotEmpty(t, *metadata["Member"])
		assert.NotEmpty(t, *metadata["Local_id"])
		assert.NotEmpty(t, *metadata["Version"])
	}
}
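
// The metadata checks above look up keys such as "From_node" directly,
// relying on the AWS SDK's capitalization of metadata keys. Below is a
// minimal, hypothetical helper (not part of the original tests) that
// sketches a case-insensitive lookup instead. It assumes the "strings"
// package is imported and that metadata is the map[string]*string
// returned in s3Head.Response.Metadata.
func metadataValue(metadata map[string]*string, key string) string {
	for k, v := range metadata {
		// Compare keys without regard to case, so "from_node"
		// matches the SDK's "From_node".
		if v != nil && strings.EqualFold(k, key) {
			return *v
		}
	}
	return ""
}

// Usage sketch: assert.NotEmpty(t, metadataValue(metadata, "from_node"))
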
// TestIngestStoreItemsAreInStorage makes sure that the items we sent off
// to long-term storage in AWS actually made it there.
func TestIngestStoreItemsAreInStorage(t *testing.T) {
	if !apt_testutil.ShouldRunIntegrationTests() {
		t.Skip("Skipping integration test. Set ENV var RUN_EXCHANGE_INTEGRATION=true if you want to run them.")
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	maxItemsToList := int64(1)
	// s3List lists bucket contents.
	s3List := network.NewS3ObjectList(
		constants.AWSVirginia,
		_context.Config.DPN.DPNPreservationBucket,
		maxItemsToList)
	// s3Head gets metadata about specific objects in S3/Glacier.
	s3Head := network.NewS3Head(_context.Config.APTrustS3Region,
		_context.Config.DPN.DPNPreservationBucket)

	pathToLogFile := filepath.Join(_context.Config.LogDirectory, "dpn_ingest_store.json")
	for _, s3Key := range apt_testutil.INTEGRATION_GOOD_BAGS[0:7] {
		parts := strings.Split(s3Key, "/")
		localTarFileName := parts[1] // APTrust bag name. E.g. "test.edu.test_123.tar"
		manifest, err := apt_testutil.FindDPNIngestManifestInLog(pathToLogFile, localTarFileName)
		require.Nil(t, err, "Could not find JSON record for %s", localTarFileName)
		parts = strings.Split(manifest.StorageURL, "/")
		// DPN bag name: <uuid>.tar (an alternative using net/url is sketched after this test)
		dpnTarFileName := parts[len(parts)-1]
		s3List.GetList(dpnTarFileName)
		require.Empty(t, s3List.ErrorMessage)
		require.EqualValues(t, 1, len(s3List.Response.Contents), "Nothing in S3 for %s", dpnTarFileName)
		obj := s3List.Response.Contents[0]
		assert.Equal(t, dpnTarFileName, *obj.Key)

		// Make sure each item has the expected metadata.
		// s3Head.Response.Metadata is map[string]*string.
		s3Head.Head(dpnTarFileName)
		require.Empty(t, s3Head.ErrorMessage)
		metadata := s3Head.Response.Metadata
		require.NotNil(t, metadata, dpnTarFileName)
		// Note that the AWS SDK capitalizes the first letter of each
		// metadata key, so we look up "From_node" rather than "from_node".
		require.NotNil(t, metadata["From_node"], dpnTarFileName)
		require.NotNil(t, metadata["Transfer_id"], dpnTarFileName)
		require.NotNil(t, metadata["Member"], dpnTarFileName)
		require.NotNil(t, metadata["Local_id"], dpnTarFileName)
		require.NotNil(t, metadata["Version"], dpnTarFileName)

		assert.NotEmpty(t, *metadata["From_node"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Transfer_id"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Member"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Local_id"], dpnTarFileName)
		assert.NotEmpty(t, *metadata["Version"], dpnTarFileName)
	}
}
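
// The loop above derives the DPN tar file name by splitting
// manifest.StorageURL on "/" and keeping the last segment. Below is a
// hypothetical alternative (not in the original test) using the standard
// library's "net/url" and "path" packages; it assumes the storage URL's
// path ends in "<uuid>.tar" and ignores any query string.
func tarFileNameFromURL(storageURL string) (string, error) {
	u, err := url.Parse(storageURL)
	if err != nil {
		return "", err
	}
	// path.Base returns the final element of the URL path, e.g. "<uuid>.tar".
	return path.Base(u.Path), nil
}
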
func TestHead(t *testing.T) {
	// canTestS3, testBucket, testFile, testFileSize, and testFileETag
	// are defined in s3_download_test (a hypothetical sketch of those
	// fixtures follows this test).
	if !canTestS3() {
		return
	}
	_context, err := apt_testutil.GetContext("integration.json")
	require.Nil(t, err, "Could not create context")
	client := network.NewS3Head(_context.Config.APTrustS3Region, testBucket)
	client.Head(testFile)
	require.Empty(t, client.ErrorMessage)
	assert.EqualValues(t, testFileSize, *client.Response.ContentLength)
	assert.Equal(t, testFileETag, *client.Response.ETag)
	assert.Equal(t, "application/x-tar", *client.Response.ContentType)
}
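
// canTestS3 and the fixtures used above live in s3_download_test. The
// following is a hypothetical sketch of what those definitions might look
// like; the bucket name, object key, size, ETag, and the environment check
// are assumptions for illustration only (the "os" package is assumed to
// be imported).
var (
	testBucket   = "example-test-bucket" // assumed bucket name
	testFile     = "example_object.tar"  // assumed object key
	testFileSize = int64(23552)          // assumed size in bytes
	testFileETag = `"0123456789abcdef0123456789abcdef"` // assumed ETag value
)

// canTestS3 might simply report whether AWS credentials are present
// in the environment.
func canTestS3() bool {
	return os.Getenv("AWS_ACCESS_KEY_ID") != "" &&
		os.Getenv("AWS_SECRET_ACCESS_KEY") != ""
}
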