// fakeCloudStorage emulates the Cloud Storage upload endpoint: it validates
// the request, decompresses the body, verifies the digest and stores the blob
// in the staging area.
func (server *isolatedFake) fakeCloudStorage(w http.ResponseWriter, r *http.Request) {
	defer r.Body.Close()
	if r.Header.Get("Content-Type") != "application/octet-stream" {
		w.WriteHeader(400)
		server.Fail(fmt.Errorf("invalid content type: %s", r.Header.Get("Content-Type")))
		return
	}
	if r.Method != "PUT" {
		w.WriteHeader(405)
		server.Fail(fmt.Errorf("invalid method: %s", r.Method))
		return
	}
	raw, err := ioutil.ReadAll(isolated.GetDecompressor(r.Body))
	if err != nil {
		w.WriteHeader(500)
		server.Fail(err)
		return
	}
	digest := isolated.HexDigest(r.URL.Query().Get("digest"))
	if digest != isolated.HashBytes(raw) {
		w.WriteHeader(400)
		server.Fail(fmt.Errorf("invalid digest %#v", digest))
		return
	}
	server.lock.Lock()
	defer server.lock.Unlock()
	server.staging[digest] = raw
	w.WriteHeader(200)
}
// makeItems builds an items collection from literal string contents,
// computing the digest and size of each entry.
func makeItems(contents ...string) items {
	out := items{}
	for _, content := range contents {
		c := []byte(content)
		hex := isolated.HashBytes(c)
		out.digests = append(out.digests, &isolated.DigestItem{hex, false, int64(len(content))})
		out.contents = append(out.contents, c)
	}
	return out
}
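// The items container used above is declared elsewhere in the package; based
// on how makeItems populates it, its shape is likely the following (a sketch
// under that assumption, not the actual declaration):
//
//	type items struct {
//		digests  []*isolated.DigestItem
//		contents [][]byte
//	}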
// Add reads src fully, verifies its digest and size, then stores the content
// in the in-memory cache, evicting older items as needed to respect the
// policies.
func (m *memory) Add(digest isolated.HexDigest, src io.Reader) error {
	if !digest.Validate() {
		return os.ErrInvalid
	}
	// TODO(maruel): Use a LimitedReader flavor that fails when reaching limit.
	content, err := ioutil.ReadAll(src)
	if err != nil {
		return err
	}
	if isolated.HashBytes(content) != digest {
		return errors.New("invalid hash")
	}
	if common.Size(len(content)) > m.policies.MaxSize {
		return errors.New("item too large")
	}
	m.lock.Lock()
	defer m.lock.Unlock()
	m.data[digest] = content
	m.lru.pushFront(digest, common.Size(len(content)))
	m.respectPolicies()
	return nil
}
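// The TODO above could be addressed along these lines: cap the read at
// MaxSize+1 bytes so an oversized item is rejected without buffering all of
// it. This is a sketch under stated assumptions, not the package's actual
// implementation; readAtMost is a hypothetical helper name.
func readAtMost(src io.Reader, max int64) ([]byte, error) {
	// Read one byte past the limit so "exactly max" and "too large" can be
	// told apart.
	content, err := ioutil.ReadAll(io.LimitReader(src, max+1))
	if err != nil {
		return nil, err
	}
	if int64(len(content)) > max {
		return nil, errors.New("item too large")
	}
	return content, nil
}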
// storeInline handles an inline upload: it validates the upload ticket,
// decompresses the content, verifies the digest and records the blob.
func (server *isolatedFake) storeInline(r *http.Request) interface{} {
	data := &isolated.StorageRequest{}
	if err := json.NewDecoder(r.Body).Decode(data); err != nil {
		server.Fail(err)
		return map[string]string{"err": err.Error()}
	}
	prefix := "ticket:"
	if !strings.HasPrefix(data.UploadTicket, prefix) {
		err := fmt.Errorf("unexpected ticket %#v", data.UploadTicket)
		server.Fail(err)
		return map[string]string{"err": err.Error()}
	}
	digest := isolated.HexDigest(data.UploadTicket[len(prefix):])
	if !digest.Validate() {
		err := fmt.Errorf("invalid digest %#v", digest)
		server.Fail(err)
		return map[string]string{"err": err.Error()}
	}
	raw, err := ioutil.ReadAll(isolated.GetDecompressor(bytes.NewBuffer(data.Content)))
	if err != nil {
		server.Fail(err)
		return map[string]string{"err": err.Error()}
	}
	if digest != isolated.HashBytes(raw) {
		err := fmt.Errorf("invalid digest %#v", digest)
		server.Fail(err)
		return map[string]string{"err": err.Error()}
	}
	server.lock.Lock()
	defer server.lock.Unlock()
	server.contents[digest] = raw
	return map[string]string{"ok": "true"}
}
func TestArchive(t *testing.T) {
	// Create a .isolate file and archive it.
	t.Parallel()
	server := isolatedfake.New()
	ts := httptest.NewServer(server)
	defer ts.Close()
	a := archiver.New(isolatedclient.New(ts.URL, "default-gzip"), nil)

	// Setup temporary directory.
	//   /base/bar
	//   /base/ignored
	//   /foo/baz.isolate
	//   /link -> /base/bar
	// Result:
	//   /baz.isolated
	tmpDir, err := ioutil.TempDir("", "isolate")
	ut.AssertEqual(t, nil, err)
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Fail()
		}
	}()
	baseDir := filepath.Join(tmpDir, "base")
	fooDir := filepath.Join(tmpDir, "foo")
	ut.AssertEqual(t, nil, os.Mkdir(baseDir, 0700))
	ut.AssertEqual(t, nil, os.Mkdir(fooDir, 0700))
	ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(baseDir, "bar"), []byte("foo"), 0600))
	ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(baseDir, "ignored"), []byte("ignored"), 0600))
	isolate := `{
		'variables': {
			'files': [
				'../base/',
				'../link',
			],
		},
		'conditions': [
			['OS=="amiga"', {
				'variables': {
					'command': ['amiga', '<(EXTRA)'],
				},
			}],
			['OS=="win"', {
				'variables': {
					'command': ['win'],
				},
			}],
		],
	}`
	isolatePath := filepath.Join(fooDir, "baz.isolate")
	ut.AssertEqual(t, nil, ioutil.WriteFile(isolatePath, []byte(isolate), 0600))
	if !common.IsWindows() {
		ut.AssertEqual(t, nil, os.Symlink(filepath.Join("base", "bar"), filepath.Join(tmpDir, "link")))
	} else {
		ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(tmpDir, "link"), []byte("no link on Windows"), 0600))
	}
	opts := &ArchiveOptions{
		Isolate:         isolatePath,
		Isolated:        filepath.Join(tmpDir, "baz.isolated"),
		Blacklist:       common.Strings{"ignored", "*.isolate"},
		PathVariables:   map[string]string{"VAR": "wonderful"},
		ExtraVariables:  map[string]string{"EXTRA": "really"},
		ConfigVariables: map[string]string{"OS": "amiga"},
	}
	future := Archive(a, opts)
	ut.AssertEqual(t, "baz.isolated", future.DisplayName())
	future.WaitForHashed()
	ut.AssertEqual(t, nil, future.Error())
	ut.AssertEqual(t, nil, a.Close())

	mode := 0600
	if common.IsWindows() {
		mode = 0666
	}

	//   /base/
	isolatedDirData := isolated.Isolated{
		Algo: "sha-1",
		Files: map[string]isolated.File{
			filepath.Join("base", "bar"): {Digest: "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", Mode: newInt(mode), Size: newInt64(3)},
		},
		Version: isolated.IsolatedFormatVersion,
	}
	encoded, err := json.Marshal(isolatedDirData)
	ut.AssertEqual(t, nil, err)
	isolatedDirEncoded := string(encoded) + "\n"
	isolatedDirHash := isolated.HashBytes([]byte(isolatedDirEncoded))

	isolatedData := isolated.Isolated{
		Algo:        "sha-1",
		Command:     []string{"amiga", "really"},
		Files:       map[string]isolated.File{},
		Includes:    []isolated.HexDigest{isolatedDirHash},
		RelativeCwd: "foo",
		Version:     isolated.IsolatedFormatVersion,
	}
	if !common.IsWindows() {
		isolatedData.Files["link"] = isolated.File{Link: newString(filepath.Join("base", "bar"))}
	} else {
		isolatedData.Files["link"] = isolated.File{Digest: "12339b9756c2994f85c310d560bc8c142a6b79a1", Mode: newInt(0666), Size: newInt64(18)}
	}
	encoded, err = json.Marshal(isolatedData)
	ut.AssertEqual(t, nil, err)
	isolatedEncoded := string(encoded) + "\n"
	isolatedHash := isolated.HashBytes([]byte(isolatedEncoded))

	expected := map[string]string{
		"0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33": "foo",
		string(isolatedDirHash):                    isolatedDirEncoded,
		string(isolatedHash):                       isolatedEncoded,
	}
	if common.IsWindows() {
		expected["12339b9756c2994f85c310d560bc8c142a6b79a1"] = "no link on Windows"
	}
	actual := map[string]string{}
	for k, v := range server.Contents() {
		actual[string(k)] = string(v)
		ut.AssertEqualf(t, expected[string(k)], actual[string(k)], "%s: %#v", k, actual[string(k)])
	}
	ut.AssertEqual(t, expected, actual)
	ut.AssertEqual(t, isolatedHash, future.Digest())

	stats := a.Stats()
	ut.AssertEqual(t, 0, stats.TotalHits())
	ut.AssertEqual(t, common.Size(0), stats.TotalBytesHits())
	if !common.IsWindows() {
		ut.AssertEqual(t, 3, stats.TotalMisses())
		ut.AssertEqual(t, common.Size(3+len(isolatedDirEncoded)+len(isolatedEncoded)), stats.TotalBytesPushed())
	} else {
		ut.AssertEqual(t, 4, stats.TotalMisses())
		ut.AssertEqual(t, common.Size(3+18+len(isolatedDirEncoded)+len(isolatedEncoded)), stats.TotalBytesPushed())
	}
	ut.AssertEqual(t, nil, server.Error())

	digest, err := isolated.HashFile(filepath.Join(tmpDir, "baz.isolated"))
	ut.AssertEqual(t, isolated.DigestItem{isolatedHash, false, int64(len(isolatedEncoded))}, digest)
	ut.AssertEqual(t, nil, err)
}
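// The tests in this file rely on small pointer helpers (newInt, newInt64,
// newString) defined elsewhere in the package. Their likely shape, shown only
// for readability and based on how they are used above, is:
//
//	func newInt(v int) *int          { return &v }
//	func newInt64(v int64) *int64    { return &v }
//	func newString(v string) *string { return &v }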
func TestPushDirectory(t *testing.T) {
	// Uploads a real directory that contains the same file twice.
	t.Parallel()
	server := isolatedfake.New()
	ts := httptest.NewServer(server)
	defer ts.Close()
	a := New(isolatedclient.New(ts.URL, "default-gzip"), nil)

	// Setup temporary directory.
	tmpDir, err := ioutil.TempDir("", "archiver")
	ut.AssertEqual(t, nil, err)
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Fail()
		}
	}()
	baseDir := filepath.Join(tmpDir, "base")
	ignoredDir := filepath.Join(tmpDir, "ignored1")
	ut.AssertEqual(t, nil, os.Mkdir(baseDir, 0700))
	ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(baseDir, "bar"), []byte("foo"), 0600))
	ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(baseDir, "bar_dupe"), []byte("foo"), 0600))
	if !common.IsWindows() {
		ut.AssertEqual(t, nil, os.Symlink("bar", filepath.Join(baseDir, "link")))
	}
	ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(baseDir, "ignored2"), []byte("ignored"), 0600))
	ut.AssertEqual(t, nil, os.Mkdir(ignoredDir, 0700))
	ut.AssertEqual(t, nil, ioutil.WriteFile(filepath.Join(ignoredDir, "really"), []byte("ignored"), 0600))

	future := PushDirectory(a, tmpDir, "", []string{"ignored1", filepath.Join("*", "ignored2")})
	ut.AssertEqual(t, filepath.Base(tmpDir)+".isolated", future.DisplayName())
	future.WaitForHashed()
	ut.AssertEqual(t, nil, a.Close())

	mode := 0600
	if common.IsWindows() {
		mode = 0666
	}
	isolatedData := isolated.Isolated{
		Algo: "sha-1",
		Files: map[string]isolated.File{
			filepath.Join("base", "bar"):      {Digest: "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", Mode: newInt(mode), Size: newInt64(3)},
			filepath.Join("base", "bar_dupe"): {Digest: "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", Mode: newInt(mode), Size: newInt64(3)},
		},
		Version: isolated.IsolatedFormatVersion,
	}
	if !common.IsWindows() {
		isolatedData.Files[filepath.Join("base", "link")] = isolated.File{Link: newString("bar")}
	}
	encoded, err := json.Marshal(isolatedData)
	ut.AssertEqual(t, nil, err)
	isolatedEncoded := string(encoded) + "\n"
	isolatedHash := isolated.HashBytes([]byte(isolatedEncoded))

	expected := map[string]string{
		"0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33": "foo",
		string(isolatedHash):                       isolatedEncoded,
	}
	actual := map[string]string{}
	for k, v := range server.Contents() {
		actual[string(k)] = string(v)
	}
	ut.AssertEqual(t, expected, actual)
	ut.AssertEqual(t, isolatedHash, future.Digest())

	stats := a.Stats()
	ut.AssertEqual(t, 0, stats.TotalHits())
	// There are 3 cache misses even though the same content is looked up twice.
	ut.AssertEqual(t, 3, stats.TotalMisses())
	ut.AssertEqual(t, common.Size(0), stats.TotalBytesHits())
	ut.AssertEqual(t, common.Size(3+3+len(isolatedEncoded)), stats.TotalBytesPushed())
	ut.AssertEqual(t, nil, server.Error())
}
// Inject stores data directly in the fake server's contents, keyed by its
// digest, bypassing the upload endpoints.
func (server *isolatedFake) Inject(data []byte) {
	h := isolated.HashBytes(data)
	server.lock.Lock()
	defer server.lock.Unlock()
	server.contents[h] = data
}
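// Usage sketch (assumed wiring, mirroring how the tests above use the fake):
// a test can pre-seed the server so a digest reads back as already stored,
// turning what would be an upload into a cache hit.
//
//	server := isolatedfake.New()
//	data := []byte("pre-seeded blob")
//	server.Inject(data)
//	_, present := server.Contents()[isolated.HashBytes(data)]
//	// present is now true, so a client checking this digest does not need
//	// to upload it.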
// testCache runs a common battery of assertions against a Cache
// implementation. c's policies must have MaxItems == 2 and MaxSize == 1024.
func testCache(t *testing.T, c Cache) []isolated.HexDigest {
	td, err := ioutil.TempDir("", "cache")
	ut.AssertEqual(t, nil, err)
	defer func() {
		if err := os.RemoveAll(td); err != nil {
			t.Fail()
		}
	}()

	fakeDigest := isolated.HexDigest("0123456789012345678901234567890123456789")
	badDigest := isolated.HexDigest("012345678901234567890123456789012345678")
	emptyContent := []byte{}
	emptyDigest := isolated.HashBytes(emptyContent)
	file1Content := []byte("foo")
	file1Digest := isolated.HashBytes(file1Content)
	file2Content := []byte("foo bar")
	file2Digest := isolated.HashBytes(file2Content)
	largeContent := bytes.Repeat([]byte("A"), 1023)
	largeDigest := isolated.HashBytes(largeContent)
	tooLargeContent := bytes.Repeat([]byte("A"), 1025)
	tooLargeDigest := isolated.HashBytes(tooLargeContent)

	ut.AssertEqual(t, []isolated.HexDigest{}, c.Keys())
	ut.AssertEqual(t, false, c.Touch(fakeDigest))
	ut.AssertEqual(t, false, c.Touch(badDigest))

	c.Evict(fakeDigest)
	c.Evict(badDigest)

	r, err := c.Read(fakeDigest)
	ut.AssertEqual(t, nil, r)
	ut.AssertEqual(t, true, err != nil)
	r, err = c.Read(badDigest)
	ut.AssertEqual(t, nil, r)
	ut.AssertEqual(t, true, err != nil)

	// It's too large to fit in the cache at all.
	ut.AssertEqual(t, true, nil != c.Add(tooLargeDigest, bytes.NewBuffer(tooLargeContent)))
	// This one fits for now, but since it nearly fills MaxSize it will be
	// evicted as soon as more data is added below.
	ut.AssertEqual(t, nil, c.Add(largeDigest, bytes.NewBuffer(largeContent)))
	ut.AssertEqual(t, nil, c.Add(emptyDigest, bytes.NewBuffer(emptyContent)))
	ut.AssertEqual(t, nil, c.Add(emptyDigest, bytes.NewBuffer(emptyContent)))
	ut.AssertEqual(t, []isolated.HexDigest{emptyDigest, largeDigest}, c.Keys())
	c.Evict(emptyDigest)
	ut.AssertEqual(t, []isolated.HexDigest{largeDigest}, c.Keys())

	ut.AssertEqual(t, nil, c.Add(emptyDigest, bytes.NewBuffer(emptyContent)))
	ut.AssertEqual(t, nil, c.Add(file1Digest, bytes.NewBuffer(file1Content)))
	ut.AssertEqual(t, true, c.Touch(emptyDigest))
	ut.AssertEqual(t, nil, c.Add(file2Digest, bytes.NewBuffer(file2Content)))

	r, err = c.Read(file1Digest)
	ut.AssertEqual(t, nil, r)
	ut.AssertEqual(t, true, nil != err)
	r, err = c.Read(file2Digest)
	ut.AssertEqual(t, nil, err)
	actual, err := ioutil.ReadAll(r)
	ut.AssertEqual(t, nil, r.Close())
	ut.AssertEqual(t, nil, err)
	ut.AssertEqual(t, file2Content, actual)

	expected := []isolated.HexDigest{file2Digest, emptyDigest}
	ut.AssertEqual(t, expected, c.Keys())

	dest := filepath.Join(td, "foo")
	ut.AssertEqual(t, true, nil != c.Hardlink(fakeDigest, dest, os.FileMode(0600)))
	ut.AssertEqual(t, true, nil != c.Hardlink(badDigest, dest, os.FileMode(0600)))
	ut.AssertEqual(t, nil, c.Hardlink(file2Digest, dest, os.FileMode(0600)))
	// Hardlinking again to an existing destination may or may not succeed
	// depending on the platform, so the result is intentionally ignored.
	_ = c.Hardlink(file2Digest, dest, os.FileMode(0600))
	actual, err = ioutil.ReadFile(dest)
	ut.AssertEqual(t, nil, err)
	ut.AssertEqual(t, file2Content, actual)

	ut.AssertEqual(t, nil, c.Close())
	return expected
}
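// Hypothetical driver (NewMemory and the Policies literal below are assumed
// names, not confirmed API): testCache expects the cache under test to be
// configured with MaxSize == 1024 and MaxItems == 2.
//
//	func TestMemoryCache(t *testing.T) {
//		t.Parallel()
//		testCache(t, NewMemory(Policies{MaxSize: 1024, MaxItems: 2}))
//	}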