Example #1
func TestGitAndRootDirs(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	git, root, err := GitAndRootDirs()
	if err != nil {
		t.Fatal(err)
	}

	expected, err := os.Stat(git)
	if err != nil {
		t.Fatal(err)
	}

	actual, err := os.Stat(filepath.Join(root, ".git"))
	if err != nil {
		t.Fatal(err)
	}

	assert.True(t, os.SameFile(expected, actual))
}
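
The Pushd/Popd/Cleanup defer block above recurs in every example below. A hypothetical helper (not part of the test package; shown only as a sketch, and assuming NewRepo returns a *test.Repo) could centralise that teardown with t.Cleanup:

// setupRepo is a hypothetical convenience wrapper around the test.Repo
// helpers used throughout these examples; the teardown runs automatically
// when the test finishes.
func setupRepo(t *testing.T) *test.Repo {
	repo := test.NewRepo(t)
	repo.Pushd()
	t.Cleanup(func() {
		repo.Popd()
		repo.Cleanup()
	})
	return repo
}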
Example #2
func TestCurrentRefAndCurrentRemoteRef(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	// test commits; we'll just modify the same file each time since we're
	// only interested in branches
	inputs := []*test.CommitInput{
		{ // 0
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
			},
		},
		{ // 1
			NewBranch: "branch2",
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 25},
			},
		},
		{ // 2
			ParentBranches: []string{"master"}, // back on master
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 30},
			},
		},
		{ // 3
			NewBranch: "branch3",
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 32},
			},
		},
	}
	outputs := repo.AddCommits(inputs)
	// last commit was on branch3
	ref, err := CurrentRef()
	assert.Equal(t, nil, err)
	assert.Equal(t, &Ref{"branch3", RefTypeLocalBranch, outputs[3].Sha}, ref)
	test.RunGitCommand(t, true, "checkout", "master")
	ref, err = CurrentRef()
	assert.Equal(t, nil, err)
	assert.Equal(t, &Ref{"master", RefTypeLocalBranch, outputs[2].Sha}, ref)
	// Check remote
	repo.AddRemote("origin")
	test.RunGitCommand(t, true, "push", "-u", "origin", "master:someremotebranch")
	ref, err = CurrentRemoteRef()
	assert.Equal(t, nil, err)
	assert.Equal(t, &Ref{"origin/someremotebranch", RefTypeRemoteBranch, outputs[2].Sha}, ref)

	refname, err := RemoteRefNameForCurrentBranch()
	assert.Equal(t, nil, err)
	assert.Equal(t, "origin/someremotebranch", refname)

	remote, err := RemoteForCurrentBranch()
	assert.Equal(t, nil, err)
	assert.Equal(t, "origin", remote)
}
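
The Ref values above are built positionally; from that usage the type presumably looks roughly like the sketch below (field names are an assumption, not taken from the package source):

// Rough shape of Ref as implied by the literals in these tests; the real
// definition in the package may differ.
type Ref struct {
	Name string
	Type RefType
	Sha  string
}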
Example #3
func TestLocalRefs(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	repo.AddCommits([]*test.CommitInput{
		{
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
			},
		},
		{
			NewBranch:      "branch",
			ParentBranches: []string{"master"},
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
			},
		},
	})

	test.RunGitCommand(t, true, "tag", "v1")

	refs, err := LocalRefs()
	if err != nil {
		t.Fatal(err)
	}

	actual := make(map[string]bool)
	for _, r := range refs {
		t.Logf("REF: %s", r.Name)
		switch r.Type {
		case RefTypeHEAD:
			t.Errorf("Local HEAD ref: %v", r)
		case RefTypeOther:
			t.Errorf("Stash or unknown ref: %v", r)
		case RefTypeRemoteBranch, RefTypeRemoteTag:
			t.Errorf("Remote ref: %v", r)
		default:
			actual[r.Name] = true
		}
	}

	expected := []string{"master", "branch", "v1"}
	found := 0
	for _, refname := range expected {
		if actual[refname] {
			found++
		} else {
			t.Errorf("could not find ref %q", refname)
		}
	}

	if found != len(expected) {
		t.Errorf("Unexpected local refs: %v", actual)
	}
}
Example #4
func TestUploadApiError(t *testing.T) {
	SetupTestCredentialsFunc()
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
		RestoreCredentialsFunc()
	}()

	mux := http.NewServeMux()
	server := httptest.NewServer(mux)
	tmp := tempdir(t)
	defer server.Close()
	defer os.RemoveAll(tmp)

	postCalled := false

	mux.HandleFunc("/media/objects", func(w http.ResponseWriter, r *http.Request) {
		postCalled = true
		w.WriteHeader(404)
	})

	cfg := config.NewFrom(config.Values{
		Git: map[string]string{
			"lfs.url": server.URL + "/media",
		},
	})

	oidPath, _ := lfs.LocalMediaPath("988881adc9fc3655077dc2d4d757d480b5ea0e11")
	if err := ioutil.WriteFile(oidPath, []byte("test"), 0744); err != nil {
		t.Fatal(err)
	}

	oid := filepath.Base(oidPath)
	stat, _ := os.Stat(oidPath)
	_, _, err := api.BatchOrLegacySingle(cfg, &api.ObjectResource{Oid: oid, Size: stat.Size()}, "upload", []string{"basic"})
	if err == nil {
		t.Fatal("expected an error from the upload API call, got nil")
	}

	if errors.IsFatalError(err) {
		t.Fatal("should not panic")
	}

	if isDockerConnectionError(err) {
		return
	}

	expected := "LFS: " + fmt.Sprintf(httputil.GetDefaultError(404), server.URL+"/media/objects")
	if err.Error() != expected {
		t.Fatalf("Expected: %s\nGot: %s", expected, err.Error())
	}

	if !postCalled {
		t.Errorf("POST not called")
	}
}
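
This test leans on a tempdir helper defined elsewhere; a minimal version consistent with how it is used here might look like this (a sketch, not the repository's actual implementation):

// tempdir returns a scratch directory for the test; the caller removes it
// with os.RemoveAll, as in the deferred call above.
func tempdir(t *testing.T) string {
	dir, err := ioutil.TempDir("", "lfs-test")
	if err != nil {
		t.Fatalf("could not create temp dir: %s", err)
	}
	return dir
}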
Example #5
func buildTestData() (oidsExist, oidsMissing []TestObject, err error) {
	const oidCount = 50
	oidsExist = make([]TestObject, 0, oidCount)
	oidsMissing = make([]TestObject, 0, oidCount)

	// Build test data for existing files & upload
	// Use a test repo for this to simplify making sure the data matches each oid
	// We're not performing a real test at this point (although an upload failure will break it)
	var callback testDataCallback
	repo := test.NewRepo(&callback)
	repo.Pushd()
	defer repo.Cleanup()
	// just one commit
	commit := test.CommitInput{CommitterName: "A N Other", CommitterEmail: "*****@*****.**"}
	var totalSize int64
	for i := 0; i < oidCount; i++ {
		filename := fmt.Sprintf("file%d.dat", i)
		sz := int64(rand.Intn(200)) + 50
		commit.Files = append(commit.Files, &test.FileInput{Filename: filename, Size: sz})
		totalSize += sz
	}
	outputs := repo.AddCommits([]*test.CommitInput{&commit})

	// now upload
	uploadQueue := lfs.NewUploadQueue(len(oidsExist), totalSize, false)
	for _, f := range outputs[0].Files {
		oidsExist = append(oidsExist, TestObject{Oid: f.Oid, Size: f.Size})

		u, err := lfs.NewUploadable(f.Oid, "Test file")
		if err != nil {
			return nil, nil, err
		}
		uploadQueue.Add(u)
	}
	uploadQueue.Wait()

	for _, err := range uploadQueue.Errors() {
		if errors.IsFatalError(err) {
			exit("Fatal error setting up test data: %s", err)
		}
	}

	// Generate SHAs for missing files, random but repeatable
	// No actual file content needed for these
	rand.Seed(int64(oidCount))
	runningSha := sha256.New()
	for i := 0; i < oidCount; i++ {
		runningSha.Write([]byte{byte(rand.Intn(256))})
		oid := hex.EncodeToString(runningSha.Sum(nil))
		sz := int64(rand.Intn(200)) + 50
		oidsMissing = append(oidsMissing, TestObject{Oid: oid, Size: sz})
	}
	return oidsExist, oidsMissing, nil
}
Example #6
func TestResolveEmptyCurrentRef(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	_, err := CurrentRef()
	assert.NotEqual(t, nil, err)
}
Example #7
func TestAllCurrentObjectsNone(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	actual := AllLocalObjects()

	assert.Equal(t, []*Pointer{}, actual, "Should be no objects")
}
Example #8
func TestGitAndRootDirs(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	git, root, err := GitAndRootDirs()
	if err != nil {
		t.Fatal(err)
	}

	assert.Equal(t, git, filepath.Join(root, ".git"))
}
Example #9
func TestAllCurrentObjectsNone(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	actual := lfs.AllObjects()
	if len(actual) > 0 {
		for _, file := range actual {
			t.Logf("Found: %v", file)
		}
		t.Error("Should be no objects")
	}
}
Example #10
func TestAllCurrentObjectsSome(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	// We're not testing commits here, just storage, so use a single commit
	// input with lots of files to generate many oids
	numFiles := 20
	files := make([]*test.FileInput, 0, numFiles)
	for i := 0; i < numFiles; i++ {
		// Must be >=16 bytes for each file to be unique
		files = append(files, &test.FileInput{Filename: fmt.Sprintf("file%d.txt", i), Size: 30})
	}

	inputs := []*test.CommitInput{
		{Files: files},
	}

	outputs := repo.AddCommits(inputs)

	expected := make([]*lfs.Pointer, 0, numFiles)
	for _, f := range outputs[0].Files {
		expected = append(expected, f)
	}

	actualObjects := lfs.AllObjects()
	actual := make([]*lfs.Pointer, len(actualObjects))
	for idx, f := range actualObjects {
		actual[idx] = lfs.NewPointer(f.Oid, f.Size, nil)
	}

	// sort to ensure comparison is equal
	sort.Sort(test.PointersByOid(expected))
	sort.Sort(test.PointersByOid(actual))
	assert.Equal(t, expected, actual, "Oids from disk should be the same as in commits")
}
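
Both slices are sorted with test.PointersByOid before comparison; a minimal sort.Interface consistent with that usage could look like the sketch below (an assumption; the real helper lives in the test package, and test.RefsByName in later examples presumably follows the same pattern):

// PointersByOid orders pointers by object ID so that slices built in
// different orders compare equal.
type PointersByOid []*lfs.Pointer

func (p PointersByOid) Len() int           { return len(p) }
func (p PointersByOid) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p PointersByOid) Less(i, j int) bool { return p[i].Oid < p[j].Oid }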
Example #11
func TestGetTrackedFiles(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	// test commits; we add a spread of files and folders so we can exercise
	// the different tracking patterns below
	inputs := []*test.CommitInput{
		{ // 0
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
				{Filename: "file2.txt", Size: 20},
				{Filename: "folder1/file10.txt", Size: 20},
				{Filename: "folder1/anotherfile.txt", Size: 20},
			},
		},
		{ // 1
			Files: []*test.FileInput{
				{Filename: "file3.txt", Size: 20},
				{Filename: "file4.txt", Size: 20},
				{Filename: "folder2/something.txt", Size: 20},
				{Filename: "folder2/folder3/deep.txt", Size: 20},
			},
		},
	}
	repo.AddCommits(inputs)

	tracked, err := GetTrackedFiles("*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked) // for direct comparison
	fulllist := []string{"file1.txt", "file2.txt", "file3.txt", "file4.txt", "folder1/anotherfile.txt", "folder1/file10.txt", "folder2/folder3/deep.txt", "folder2/something.txt"}
	assert.Equal(t, fulllist, tracked)

	tracked, err = GetTrackedFiles("*file*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked)
	sublist := []string{"file1.txt", "file2.txt", "file3.txt", "file4.txt", "folder1/anotherfile.txt", "folder1/file10.txt"}
	assert.Equal(t, sublist, tracked)

	tracked, err = GetTrackedFiles("folder1/*")
	assert.Nil(t, err)
	sort.Strings(tracked)
	sublist = []string{"folder1/anotherfile.txt", "folder1/file10.txt"}
	assert.Equal(t, sublist, tracked)

	tracked, err = GetTrackedFiles("folder2/*")
	assert.Nil(t, err)
	sort.Strings(tracked)
	sublist = []string{"folder2/folder3/deep.txt", "folder2/something.txt"}
	assert.Equal(t, sublist, tracked)

	// relative dir
	os.Chdir("folder1")
	tracked, err = GetTrackedFiles("*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked)
	sublist = []string{"anotherfile.txt", "file10.txt"}
	assert.Equal(t, sublist, tracked)
	os.Chdir("..")

	// Test includes staged but uncommitted files
	ioutil.WriteFile("z_newfile.txt", []byte("Hello world"), 0644)
	test.RunGitCommand(t, true, "add", "z_newfile.txt")
	tracked, err = GetTrackedFiles("*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked)
	fulllist = append(fulllist, "z_newfile.txt")
	assert.Equal(t, fulllist, tracked)

	// Test includes modified files (not staged)
	ioutil.WriteFile("file1.txt", []byte("Modifications"), 0644)
	tracked, err = GetTrackedFiles("*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked)
	assert.Equal(t, fulllist, tracked)

	// Test includes modified files (staged)
	test.RunGitCommand(t, true, "add", "file1.txt")
	tracked, err = GetTrackedFiles("*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked)
	assert.Equal(t, fulllist, tracked)

	// Test excludes deleted files (not committed)
	test.RunGitCommand(t, true, "rm", "file2.txt")
	tracked, err = GetTrackedFiles("*.txt")
	assert.Nil(t, err)
	sort.Strings(tracked)
	deletedlist := []string{"file1.txt", "file3.txt", "file4.txt", "folder1/anotherfile.txt", "folder1/file10.txt", "folder2/folder3/deep.txt", "folder2/something.txt", "z_newfile.txt"}
	assert.Equal(t, deletedlist, tracked)
}
Example #12
func TestWorkTrees(t *testing.T) {
	// git worktree support requires git 2.5+
	if !Config.IsGitVersionAtLeast("2.5.0") {
		t.Skip("git worktree requires git 2.5+")
	}

	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	// test commits; we'll just modify the same file each time since we're
	// only interested in branches
	inputs := []*test.CommitInput{
		{ // 0
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
			},
		},
		{ // 1
			NewBranch: "branch2",
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 25},
			},
		},
		{ // 2
			NewBranch:      "branch3",
			ParentBranches: []string{"master"}, // back on master
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 30},
			},
		},
		{ // 3
			NewBranch:      "branch4",
			ParentBranches: []string{"master"}, // back on master
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 40},
			},
		},
	}
	outputs := repo.AddCommits(inputs)
	// Check out master again; we can't add a worktree for branch4 while it's checked out here
	test.RunGitCommand(t, true, "checkout", "master")

	// We can create worktrees as subfolders for convenience
	// Each one is checked out to a different branch
	// Note that we *won't* create one for branch3
	test.RunGitCommand(t, true, "worktree", "add", "branch2_wt", "branch2")
	test.RunGitCommand(t, true, "worktree", "add", "branch4_wt", "branch4")

	refs, err := GetAllWorkTreeHEADs(filepath.Join(repo.Path, ".git"))
	assert.Equal(t, nil, err)
	expectedRefs := []*Ref{
		&Ref{"master", RefTypeLocalBranch, outputs[0].Sha},
		&Ref{"branch2", RefTypeLocalBranch, outputs[1].Sha},
		&Ref{"branch4", RefTypeLocalBranch, outputs[3].Sha},
	}
	// Need to sort for consistent comparison
	sort.Sort(test.RefsByName(expectedRefs))
	sort.Sort(test.RefsByName(refs))
	assert.Equal(t, expectedRefs, refs, "Refs should be correct")
}
Example #13
func TestScanPreviousVersions(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	now := time.Now()

	inputs := []*test.CommitInput{
		{ // 0
			CommitDate: now.AddDate(0, 0, -20),
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
				{Filename: "file2.txt", Size: 30},
				{Filename: "folder/nested.txt", Size: 40},
				{Filename: "folder/nested2.txt", Size: 31},
			},
		},
		{ // 1
			CommitDate: now.AddDate(0, 0, -10),
			Files: []*test.FileInput{
				{Filename: "file2.txt", Size: 22},
			},
		},
		{ // 2
			NewBranch:  "excluded",
			CommitDate: now.AddDate(0, 0, -6),
			Files: []*test.FileInput{
				{Filename: "file2.txt", Size: 12},
				{Filename: "folder/nested2.txt", Size: 16},
			},
		},
		{ // 3
			ParentBranches: []string{"master"},
			CommitDate:     now.AddDate(0, 0, -4),
			Files: []*test.FileInput{
				{Filename: "folder/nested.txt", Size: 42},
				{Filename: "folder/nested2.txt", Size: 6},
			},
		},
		{ // 4
			Files: []*test.FileInput{
				{Filename: "folder/nested.txt", Size: 22},
			},
		},
	}
	outputs := repo.AddCommits(inputs)

	// Previous commits exclude the final state of each file, which is:
	// file1.txt            [0] (unchanged since first commit so excluded)
	// file2.txt            [1] (because [2] is on another branch so excluded)
	// folder/nested.txt    [4] (updated at last commit)
	// folder/nested2.txt   [3]

	// The only changes which will be included are changes prior to final state
	// where the '-' side of the diff is inside the date range

	// The 7-day limit excludes commit [0], but includes its state if there
	// was a subsequent change
	pointers, err := ScanPreviousVersions("master", now.AddDate(0, 0, -7))
	assert.Equal(t, nil, err)

	// Includes the following 'before' state at commits:
	// folder/nested.txt [-diff at 4, i.e. 3; -diff at 3, i.e. 0]
	// folder/nested2.txt [-diff at 3, i.e. 0]
	// others are either on different branches, before this window, or unchanged
	expected := []*WrappedPointer{
		{Name: "folder/nested.txt", Size: outputs[3].Files[0].Size, Pointer: outputs[3].Files[0]},
		{Name: "folder/nested.txt", Size: outputs[0].Files[2].Size, Pointer: outputs[0].Files[2]},
		{Name: "folder/nested2.txt", Size: outputs[0].Files[3].Size, Pointer: outputs[0].Files[3]},
	}
	// Need to sort to compare equality
	sort.Sort(test.WrappedPointersByOid(expected))
	sort.Sort(test.WrappedPointersByOid(pointers))
	assert.Equal(t, expected, pointers)
}
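
The expected slice is built from WrappedPointer literals with Name, Size and Pointer fields; inferred from this test alone, the type presumably has roughly this shape (the real definition may carry extra fields):

// Rough shape of WrappedPointer as implied by the literals above.
type WrappedPointer struct {
	Name    string
	Size    int64
	Pointer *Pointer
}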
Example #14
func TestScanUnpushed(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	inputs := []*test.CommitInput{
		{ // 0
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
			},
		},
		{ // 1
			NewBranch: "branch2",
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 25},
			},
		},
		{ // 2
			ParentBranches: []string{"master"}, // back on master
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 30},
			},
		},
		{ // 3
			NewBranch: "branch3",
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 32},
			},
		},
	}
	repo.AddCommits(inputs)

	// Add a couple of remotes and test state depending on what's pushed
	repo.AddRemote("origin")
	repo.AddRemote("upstream")

	pointers, err := ScanUnpushed("")
	assert.Equal(t, nil, err, "Should be no error calling ScanUnpushed")
	assert.Equal(t, 4, len(pointers), "Should be 4 pointers because none pushed")

	test.RunGitCommand(t, true, "push", "origin", "branch2")
	// Branch2 will have pushed 2 commits
	pointers, err = ScanUnpushed("")
	assert.Equal(t, nil, err, "Should be no error calling ScanUnpushed")
	assert.Equal(t, 2, len(pointers), "Should be 2 pointers")

	test.RunGitCommand(t, true, "push", "upstream", "master")
	// Master pushes 1 more commit
	pointers, err = ScanUnpushed("")
	assert.Equal(t, nil, err, "Should be no error calling ScanUnpushed")
	assert.Equal(t, 1, len(pointers), "Should be 1 pointer")

	test.RunGitCommand(t, true, "push", "origin", "branch3")
	// All pushed (somewhere)
	pointers, err = ScanUnpushed("")
	assert.Equal(t, nil, err, "Should be no error calling ScanUnpushed")
	assert.Equal(t, 0, len(pointers), "Should be 0 pointers unpushed")

	// Check origin
	pointers, err = ScanUnpushed("origin")
	assert.Equal(t, nil, err, "Should be no error calling ScanUnpushed")
	assert.Equal(t, 0, len(pointers), "Should be 0 pointers unpushed to origin")

	// Check upstream
	pointers, err = ScanUnpushed("upstream")
	assert.Equal(t, nil, err, "Should be no error calling ScanUnpushed")
	assert.Equal(t, 2, len(pointers), "Should be 2 pointers unpushed to upstream")
}
Example #15
func TestUploadVerifyError(t *testing.T) {
	SetupTestCredentialsFunc()
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
		RestoreCredentialsFunc()
	}()

	mux := http.NewServeMux()
	server := httptest.NewServer(mux)
	tmp := tempdir(t)
	defer server.Close()
	defer os.RemoveAll(tmp)

	postCalled := false
	verifyCalled := false

	mux.HandleFunc("/media/objects", func(w http.ResponseWriter, r *http.Request) {
		t.Logf("Server: %s %s", r.Method, r.URL)

		if r.Method != "POST" {
			w.WriteHeader(405)
			return
		}

		if r.Header.Get("Accept") != api.MediaType {
			t.Errorf("Invalid Accept")
		}

		if r.Header.Get("Content-Type") != api.MediaType {
			t.Errorf("Invalid Content-Type")
		}

		buf := &bytes.Buffer{}
		tee := io.TeeReader(r.Body, buf)
		reqObj := &api.ObjectResource{}
		err := json.NewDecoder(tee).Decode(reqObj)
		t.Logf("request header: %v", r.Header)
		t.Logf("request body: %s", buf.String())
		if err != nil {
			// t.Fatal must not be called from the handler goroutine
			t.Errorf("failed to decode request body: %s", err)
			return
		}

		if reqObj.Oid != "988881adc9fc3655077dc2d4d757d480b5ea0e11" {
			t.Errorf("invalid oid from request: %s", reqObj.Oid)
		}

		if reqObj.Size != 4 {
			t.Errorf("invalid size from request: %d", reqObj.Size)
		}

		obj := &api.ObjectResource{
			Oid:  reqObj.Oid,
			Size: reqObj.Size,
			Actions: map[string]*api.LinkRelation{
				"upload": &api.LinkRelation{
					Href:   server.URL + "/upload",
					Header: map[string]string{"A": "1"},
				},
				"verify": &api.LinkRelation{
					Href:   server.URL + "/verify",
					Header: map[string]string{"B": "2"},
				},
			},
		}

		by, err := json.Marshal(obj)
		if err != nil {
			t.Errorf("failed to marshal response: %s", err)
			return
		}

		postCalled = true
		head := w.Header()
		head.Set("Content-Type", api.MediaType)
		head.Set("Content-Length", strconv.Itoa(len(by)))
		w.WriteHeader(202)
		w.Write(by)
	})

	mux.HandleFunc("/verify", func(w http.ResponseWriter, r *http.Request) {
		verifyCalled = true
		w.WriteHeader(404)
	})

	cfg := config.NewFrom(config.Values{
		Git: map[string]string{
			"lfs.url": server.URL + "/media",
		},
	})

	oidPath, _ := lfs.LocalMediaPath("988881adc9fc3655077dc2d4d757d480b5ea0e11")
	if err := ioutil.WriteFile(oidPath, []byte("test"), 0744); err != nil {
		t.Fatal(err)
	}

	oid := filepath.Base(oidPath)
	stat, _ := os.Stat(oidPath)
	o, _, err := api.BatchOrLegacySingle(cfg, &api.ObjectResource{Oid: oid, Size: stat.Size()}, "upload", []string{"basic"})
	if err != nil {
		if isDockerConnectionError(err) {
			return
		}
		t.Fatal(err)
	}
	err = api.VerifyUpload(cfg, o)
	if err == nil {
		t.Fatal("verify should fail")
	}

	if errors.IsFatalError(err) {
		t.Fatal("should not panic")
	}

	expected := fmt.Sprintf(httputil.GetDefaultError(404), server.URL+"/verify")
	if err.Error() != expected {
		t.Fatalf("Expected: %s\nGot: %s", expected, err.Error())
	}

	if !postCalled {
		t.Errorf("POST not called")
	}

	if !verifyCalled {
		t.Errorf("verify not called")
	}
}
Example #16
func TestRecentBranches(t *testing.T) {
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
	}()

	now := time.Now()
	// test commits; we'll just modify the same file each time since we're
	// only interested in branches & dates
	inputs := []*test.CommitInput{
		{ // 0
			CommitDate: now.AddDate(0, 0, -20),
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 20},
			},
		},
		{ // 1
			CommitDate: now.AddDate(0, 0, -15),
			NewBranch:  "excluded_branch", // new branch & tag but too old
			Tags:       []string{"excluded_tag"},
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 25},
			},
		},
		{ // 2
			CommitDate:     now.AddDate(0, 0, -12),
			ParentBranches: []string{"master"}, // back on master
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 30},
			},
		},
		{ // 3
			CommitDate: now.AddDate(0, 0, -6),
			NewBranch:  "included_branch", // new branch within 7 day limit
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 32},
			},
		},
		{ // 4
			CommitDate: now.AddDate(0, 0, -3),
			NewBranch:  "included_branch_2", // new branch within 7 day limit
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 36},
			},
		},
		{ // 5
			// Final commit, current date/time
			ParentBranches: []string{"master"}, // back on master
			Files: []*test.FileInput{
				{Filename: "file1.txt", Size: 21},
			},
		},
	}
	outputs := repo.AddCommits(inputs)

	// Add a couple of remotes and push some branches
	repo.AddRemote("origin")
	repo.AddRemote("upstream")

	test.RunGitCommand(t, true, "push", "origin", "master")
	test.RunGitCommand(t, true, "push", "origin", "excluded_branch")
	test.RunGitCommand(t, true, "push", "origin", "included_branch")
	test.RunGitCommand(t, true, "push", "upstream", "master")
	test.RunGitCommand(t, true, "push", "upstream", "included_branch_2")

	// Recent, local only
	refs, err := RecentBranches(now.AddDate(0, 0, -7), false, "")
	assert.Equal(t, nil, err)
	expectedRefs := []*Ref{
		&Ref{"master", RefTypeLocalBranch, outputs[5].Sha},
		&Ref{"included_branch_2", RefTypeLocalBranch, outputs[4].Sha},
		&Ref{"included_branch", RefTypeLocalBranch, outputs[3].Sha},
	}
	assert.Equal(t, expectedRefs, refs, "Refs should be correct")

	// Recent, remotes too (all of them)
	refs, err = RecentBranches(now.AddDate(0, 0, -7), true, "")
	assert.Equal(t, nil, err)
	expectedRefs = []*Ref{
		&Ref{"master", RefTypeLocalBranch, outputs[5].Sha},
		&Ref{"included_branch_2", RefTypeLocalBranch, outputs[4].Sha},
		&Ref{"included_branch", RefTypeLocalBranch, outputs[3].Sha},
		&Ref{"upstream/master", RefTypeRemoteBranch, outputs[5].Sha},
		&Ref{"upstream/included_branch_2", RefTypeRemoteBranch, outputs[4].Sha},
		&Ref{"origin/master", RefTypeRemoteBranch, outputs[5].Sha},
		&Ref{"origin/included_branch", RefTypeRemoteBranch, outputs[3].Sha},
	}
	// Need to sort for consistent comparison
	sort.Sort(test.RefsByName(expectedRefs))
	sort.Sort(test.RefsByName(refs))
	assert.Equal(t, expectedRefs, refs, "Refs should be correct")

	// Recent, only single remote
	refs, err = RecentBranches(now.AddDate(0, 0, -7), true, "origin")
	assert.Equal(t, nil, err)
	expectedRefs = []*Ref{
		&Ref{"master", RefTypeLocalBranch, outputs[5].Sha},
		&Ref{"origin/master", RefTypeRemoteBranch, outputs[5].Sha},
		&Ref{"included_branch_2", RefTypeLocalBranch, outputs[4].Sha},
		&Ref{"included_branch", RefTypeLocalBranch, outputs[3].Sha},
		&Ref{"origin/included_branch", RefTypeRemoteBranch, outputs[3].Sha},
	}
	// Need to sort for consistent comparison
	sort.Sort(test.RefsByName(expectedRefs))
	sort.Sort(test.RefsByName(refs))
	assert.Equal(t, expectedRefs, refs, "Refs should be correct")
}
Example #17
func TestUploadWithRedirect(t *testing.T) {
	SetupTestCredentialsFunc()
	repo := test.NewRepo(t)
	repo.Pushd()
	defer func() {
		repo.Popd()
		repo.Cleanup()
		RestoreCredentialsFunc()
	}()

	mux := http.NewServeMux()
	server := httptest.NewServer(mux)
	tmp := tempdir(t)
	defer server.Close()
	defer os.RemoveAll(tmp)

	mux.HandleFunc("/redirect/objects", func(w http.ResponseWriter, r *http.Request) {
		t.Logf("Server: %s %s", r.Method, r.URL)

		if r.Method != "POST" {
			w.WriteHeader(405)
			return
		}

		w.Header().Set("Location", server.URL+"/redirect2/objects")
		w.WriteHeader(307)
	})

	mux.HandleFunc("/redirect2/objects", func(w http.ResponseWriter, r *http.Request) {
		t.Logf("Server: %s %s", r.Method, r.URL)

		if r.Method != "POST" {
			w.WriteHeader(405)
			return
		}

		w.Header().Set("Location", server.URL+"/media/objects")
		w.WriteHeader(307)
	})

	mux.HandleFunc("/media/objects", func(w http.ResponseWriter, r *http.Request) {
		t.Logf("Server: %s %s", r.Method, r.URL)

		if r.Method != "POST" {
			w.WriteHeader(405)
			return
		}

		if r.Header.Get("Accept") != api.MediaType {
			t.Errorf("Invalid Accept")
		}

		if r.Header.Get("Content-Type") != api.MediaType {
			t.Errorf("Invalid Content-Type")
		}

		buf := &bytes.Buffer{}
		tee := io.TeeReader(r.Body, buf)
		reqObj := &api.ObjectResource{}
		err := json.NewDecoder(tee).Decode(reqObj)
		t.Logf("request header: %v", r.Header)
		t.Logf("request body: %s", buf.String())
		if err != nil {
			// avoid t.Fatal from the handler goroutine
			t.Errorf("failed to decode request body: %s", err)
			return
		}

		if reqObj.Oid != "988881adc9fc3655077dc2d4d757d480b5ea0e11" {
			t.Errorf("invalid oid from request: %s", reqObj.Oid)
		}

		if reqObj.Size != 4 {
			t.Errorf("invalid size from request: %d", reqObj.Size)
		}

		obj := &api.ObjectResource{
			Actions: map[string]*api.LinkRelation{
				"upload": &api.LinkRelation{
					Href:   server.URL + "/upload",
					Header: map[string]string{"A": "1"},
				},
				"verify": &api.LinkRelation{
					Href:   server.URL + "/verify",
					Header: map[string]string{"B": "2"},
				},
			},
		}

		by, err := json.Marshal(obj)
		if err != nil {
			t.Errorf("failed to marshal response: %s", err)
			return
		}

		head := w.Header()
		head.Set("Content-Type", api.MediaType)
		head.Set("Content-Length", strconv.Itoa(len(by)))
		w.WriteHeader(200)
		w.Write(by)
	})

	defer config.Config.ResetConfig()
	config.Config.SetConfig("lfs.url", server.URL+"/redirect")

	oidPath, _ := lfs.LocalMediaPath("988881adc9fc3655077dc2d4d757d480b5ea0e11")
	if err := ioutil.WriteFile(oidPath, []byte("test"), 0744); err != nil {
		t.Fatal(err)
	}

	oid := filepath.Base(oidPath)
	stat, _ := os.Stat(oidPath)
	o, _, err := api.BatchOrLegacySingle(&api.ObjectResource{Oid: oid, Size: stat.Size()}, "upload", []string{"basic"})
	if err != nil {
		if isDockerConnectionError(err) {
			return
		}
		t.Fatal(err)
	}

	if o != nil {
		t.Fatal("Received an object")
	}
}