Example #1
// digestsFromTrace returns the digests in the given trace. If 'head' is true,
// only the digest at head (the most recent non-missing value) is returned. The
// function is robust to tallies not having been calculated for the trace.
func digestsFromTrace(id string, tr tiling.Trace, head bool, lastCommitIndex int, traceTally map[string]tally.Tally) []string {
	digests := map[string]bool{}
	if head {
		// Find the last non-missing value in the trace.
		for i := lastCommitIndex; i >= 0; i-- {
			if !tr.IsMissing(i) {
				digests[tr.(*types.GoldenTrace).Values[i]] = true
				break
			}
		}
	} else {
		// Use the traceTally if available, otherwise just inspect the trace.
		if t, ok := traceTally[id]; ok {
			for k := range t {
				digests[k] = true
			}
		} else {
			for i := lastCommitIndex; i >= 0; i-- {
				if !tr.IsMissing(i) {
					digests[tr.(*types.GoldenTrace).Values[i]] = true
				}
			}
		}
	}

	return util.KeysOfStringSet(digests)
}
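The head branch above is just a reverse scan for the most recent non-missing value. A minimal, self-contained sketch of that scan, with a hypothetical `missing` sentinel standing in for tr.IsMissing:

package main

import "fmt"

// missing is a hypothetical sentinel, standing in for tr.IsMissing(i).
const missing = ""

// digestAtHead returns the most recent non-missing value, scanning backwards
// from lastCommitIndex, and false if every slot is missing.
func digestAtHead(values []string, lastCommitIndex int) (string, bool) {
	for i := lastCommitIndex; i >= 0; i-- {
		if values[i] != missing {
			return values[i], true
		}
	}
	return "", false
}

func main() {
	values := []string{"aaa", missing, "bbb", missing, missing}
	d, ok := digestAtHead(values, len(values)-1)
	fmt.Println(d, ok) // bbb true
}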
Example #2
func warmTrybotDigests(storages *storage.Storage, traceDigests map[string]bool) error {
	// Warm the digests that were generated by the last 20 trybots.
	issues, _, err := storages.TrybotResults.List(0, 20)
	if err != nil {
		return err
	}

	trybotDigests := map[string]bool{}
	var wg sync.WaitGroup
	var mutex sync.Mutex
	for _, oneIssue := range issues {
		wg.Add(1)
		go func(issueID string) {
			// Decrement the WaitGroup even on the early error return below;
			// otherwise wg.Wait() would block forever.
			defer wg.Done()
			result, err := storages.TrybotResults.Get(issueID)
			if err != nil {
				glog.Errorf("Unable to retrieve issue %s. Got error: %s", issueID, err)
				return
			}

			mutex.Lock()
			defer mutex.Unlock()
			for _, digest := range result.Digests {
				if !traceDigests[digest] {
					trybotDigests[digest] = true
				}
			}
		}(oneIssue.Issue)
	}

	wg.Wait()
	digests := util.KeysOfStringSet(trybotDigests)
	glog.Infof("\n\n\nFOUND %d trybot digests to fetch.\n\n\n\n", len(digests))
	storages.DiffStore.AbsPath(digests)
	return nil
}
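The function above follows the standard fan-out pattern: one goroutine per item, a mutex guarding the shared set, and a WaitGroup to join. A self-contained sketch of the same pattern, with a hypothetical fetch callback in place of TrybotResults.Get:

package main

import (
	"fmt"
	"sync"
)

// collect fans out one goroutine per id, merges each result set into a shared
// map under a mutex, and joins on the WaitGroup before returning.
func collect(ids []string, fetch func(string) []string) map[string]bool {
	out := map[string]bool{}
	var wg sync.WaitGroup
	var mu sync.Mutex
	for _, id := range ids {
		wg.Add(1)
		go func(id string) {
			defer wg.Done() // runs even on an early return
			digests := fetch(id)
			mu.Lock()
			defer mu.Unlock()
			for _, d := range digests {
				out[d] = true
			}
		}(id)
	}
	wg.Wait()
	return out
}

func main() {
	fetch := func(id string) []string { return []string{id + "-1", id + "-2"} }
	fmt.Println(len(collect([]string{"a", "b"}, fetch))) // 4
}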
Example #3
// CalcSummaries returns a Summary for each test that matches the given input filters.
//
// testNames
//   If non-empty, restrict the results to tests that appear in this slice.
// query
//   URL encoded paramset to use for filtering.
// includeIgnores
//   If true, include all digests in the results, even ones normally hidden
//   by the ignores list.
// head
//   Only consider digests at head if true.
//
func (s *Summaries) CalcSummaries(testNames []string, query string, includeIgnores bool, head bool) (map[string]*Summary, error) {
	defer timer.New("CalcSummaries").Stop()
	glog.Infof("CalcSummaries: includeIgnores %v head %v", includeIgnores, head)

	t := timer.New("CalcSummaries:GetLastTileTrimmed")
	tile, err := s.storages.GetLastTileTrimmed(includeIgnores)
	t.Stop()
	if err != nil {
		return nil, fmt.Errorf("Couldn't retrieve tile: %s", err)
	}
	q, err := url.ParseQuery(query)
	if err != nil {
		return nil, fmt.Errorf("Failed to parse Query in CalcSummaries: %s", err)
	}

	ret := map[string]*Summary{}

	t = timer.New("CalcSummaries:Expectations")
	e, err := s.storages.ExpectationsStore.Get()
	t.Stop()
	if err != nil {
		return nil, fmt.Errorf("Couldn't get expectations: %s", err)
	}

	// Filter down to just the traces we are interested in, based on query.
	filtered := map[string][]*TraceID{}
	t = timer.New("Filter Traces")
	for id, tr := range tile.Traces {
		name := tr.Params()[types.PRIMARY_KEY_FIELD]
		if len(testNames) > 0 && !util.In(name, testNames) {
			continue
		}
		if tiling.Matches(tr, q) {
			// Appending to the nil slice returned for a missing key is fine,
			// so no existence check is needed.
			filtered[name] = append(filtered[name], &TraceID{tr: tr, id: id})
		}
	}
	t.Stop()

	traceTally := s.tallies.ByTrace()

	// Now create summaries for each test using the filtered set of traces.
	t = timer.New("Tally up the filtered traces")
	lastCommitIndex := tile.LastCommitIndex()
	for name, traces := range filtered {
		digests := map[string]bool{}
		corpus := ""
		for _, trid := range traces {
			corpus = trid.tr.Params()["source_type"]
			if head {
				// Find the last non-missing value in the trace.
				for i := lastCommitIndex; i >= 0; i-- {
					if !trid.tr.IsMissing(i) {
						digests[trid.tr.(*types.GoldenTrace).Values[i]] = true
						break
					}
				}
			} else {
				// Use the traceTally if available, otherwise just inspect the
				// trace. Named tt to avoid shadowing the timer t above.
				if tt, ok := traceTally[trid.id]; ok {
					for k := range tt {
						digests[k] = true
					}
				} else {
					for i := lastCommitIndex; i >= 0; i-- {
						if !trid.tr.IsMissing(i) {
							digests[trid.tr.(*types.GoldenTrace).Values[i]] = true
						}
					}
				}
			}
		}
		ret[name] = s.makeSummary(name, e, s.storages.DiffStore, corpus, util.KeysOfStringSet(digests))
	}
	t.Stop()

	return ret, nil
}
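The query argument is a URL-encoded paramset, so a caller builds it with net/url. A sketch of constructing one; the keys here ("source_type", "config") are hypothetical examples, and the commented-out call assumes a summaries value constructed elsewhere:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Build the URL-encoded paramset that CalcSummaries expects in its
	// query argument. Repeated values OR together within a key.
	q := url.Values{
		"source_type": []string{"gm"},
		"config":      []string{"8888", "gpu"},
	}
	fmt.Println(q.Encode()) // config=8888&config=gpu&source_type=gm
	// summaries.CalcSummaries(nil, q.Encode(), false, true) // hypothetical call
}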
Example #4
func Init(storages *storage.Storage, summaries *summary.Summaries, tallies *tally.Tallies) error {
	exp, err := storages.ExpectationsStore.Get()
	if err != nil {
		return err
	}
	go func() {
		oneRun := func() {
			t := timer.New("warmer one loop")
			byTest := tallies.ByTest()
			for test, sum := range summaries.Get() {
				// Look the tally up once per test instead of once per digest,
				// and avoid shadowing the timer t above.
				testTally := byTest[test]
				if testTally == nil {
					continue
				}
				for _, digest := range sum.UntHashes {
					// Calculate the closest digest for the side effect of
					// filling in the filediffstore cache.
					digesttools.ClosestDigest(test, digest, exp, testTally, storages.DiffStore, types.POSITIVE)
					digesttools.ClosestDigest(test, digest, exp, testTally, storages.DiffStore, types.NEGATIVE)
				}
			}
			t.Stop()
			if newExp, err := storages.ExpectationsStore.Get(); err != nil {
				glog.Errorf("warmer: Failed to get expectations: %s", err)
			} else {
				exp = newExp
			}

			// Make sure all images are downloaded. This is necessary because
			// the front-end does not request URLs (generated via
			// DiffStore.AbsPath), which is what normally ensures that an
			// image has been downloaded.
			// TODO(stephana): Remove this once the new diffstore is in place.
			tile, err := storages.GetLastTileTrimmed(true)
			if err != nil {
				// Bail out of this run; using a nil tile below would panic.
				glog.Errorf("Error retrieving tile: %s", err)
				return
			}
			tileLen := tile.LastCommitIndex() + 1
			traceDigests := make(map[string]bool, tileLen)
			for _, trace := range tile.Traces {
				gTrace := trace.(*types.GoldenTrace)
				for _, digest := range gTrace.Values {
					if digest != types.MISSING_DIGEST {
						traceDigests[digest] = true
					}
				}
			}

			digests := util.KeysOfStringSet(traceDigests)
			glog.Infof("FOUND %d digests to fetch.", len(digests))
			storages.DiffStore.AbsPath(digests)

			if err := warmTrybotDigests(storages, traceDigests); err != nil {
				glog.Errorf("Error retrieving trybot digests: %s", err)
				return
			}
		}

		oneRun()
		for range time.Tick(time.Minute) {
			oneRun()
		}
	}()
	return nil
}
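The loop at the end runs once immediately and then once per tick. A self-contained sketch of that run-immediately-then-tick shape, with hypothetical runEvery/f names; note that time.Tick never frees its ticker, which is acceptable for a process-lifetime loop like Init's:

package main

import (
	"fmt"
	"time"
)

// runEvery mimics the warmer's loop: one immediate run, then one run per
// tick. Use time.NewTicker and Stop instead when the loop must be able to
// shut down.
func runEvery(d time.Duration, f func()) {
	f()
	for range time.Tick(d) {
		f()
	}
}

func main() {
	go runEvery(10*time.Millisecond, func() { fmt.Println("tick") })
	time.Sleep(35 * time.Millisecond)
}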
Example #5
// polyTestHandler returns a JSON description for the given test.
//
// Takes a JSON-encoded POST body of the following form:
//
//   {
//      test: The name of the test.
//      topFilter: one of "positive", "negative", or "untriaged",
//      leftFilter: one of "positive", "negative", or "untriaged",
//      topQuery: "",
//      leftQuery: "",
//      topIncludeIgnores: bool,
//      leftIncludeIgnores: bool,
//      topN: topN,
//      leftN: leftN,
//      head: true or false,
//   }
//
// The return format looks like:
//
// {
//   "top": [img1, img2, ...]
//   "left": [imgA, imgB, ...]
//   "grid": [
//     [diff1A, diff2A, ...],
//     [diff1B, diff2B, ...],
//   ],
//   "info": "Could be error or warning.",
// }
//
// Where imgN is serialized PolyTestImgInfo, and
//       diffN is a serialized PolyTestDiffInfo struct.
// Note that this format is what res/imp/grid expects to
// receive.
//
func polyTestHandler(w http.ResponseWriter, r *http.Request) {
	req := &PolyTestRequest{}
	if err := parseJson(r, req); err != nil {
		util.ReportError(w, r, err, "Failed to parse JSON request.")
		return
	}
	exp, err := storages.ExpectationsStore.Get()
	if err != nil {
		util.ReportError(w, r, err, "Failed to load expectations.")
		return
	}
	e := exp.Tests[req.Test]

	topDigests, topTotal, err := imgInfo(req.TopFilter, req.TopQuery, req.Test, e, req.TopN, req.TopIncludeIgnores, req.Sort == "top", req.Dir, req.Digest, req.Head)
	if err != nil {
		util.ReportError(w, r, err, "Failed to gather top digests.")
		return
	}
	leftDigests, leftTotal, err := imgInfo(req.LeftFilter, req.LeftQuery, req.Test, e, req.LeftN, req.LeftIncludeIgnores, req.Sort == "left", req.Dir, req.Digest, req.Head)
	if err != nil {
		util.ReportError(w, r, err, "Failed to gather left digests.")
		return
	}

	// Extract out string slices of digests to pass to *AbsPath and storages.DiffStore.Get().
	allDigests := map[string]bool{}
	topDigestMap := map[string]bool{}
	for _, d := range topDigests {
		allDigests[d.Digest] = true
		topDigestMap[d.Digest] = true
	}
	for _, d := range leftDigests {
		allDigests[d.Digest] = true
	}

	topDigestSlice := util.KeysOfStringSet(topDigestMap)
	allDigestsSlice := util.KeysOfStringSet(allDigests)
	full := storages.DiffStore.AbsPath(allDigestsSlice)

	grid := [][]*PolyTestDiffInfo{}
	for _, l := range leftDigests {
		row := []*PolyTestDiffInfo{}
		diffs, err := storages.DiffStore.Get(l.Digest, topDigestSlice)
		if err != nil {
			glog.Errorf("Failed to do diffs: %s", err)
			continue
		}
		for _, t := range topDigests {
			d, ok := diffs[t.Digest]
			if !ok {
				glog.Errorf("Failed to find expected diff for: %s", t.Digest)
				d = &diff.DiffMetrics{
					MaxRGBADiffs: []int{},
				}
			}
			row = append(row, &PolyTestDiffInfo{
				Test:             req.Test,
				TopDigest:        t.Digest,
				LeftDigest:       l.Digest,
				NumDiffPixels:    d.NumDiffPixels,
				PixelDiffPercent: d.PixelDiffPercent,
				MaxRGBADiffs:     d.MaxRGBADiffs,
				DiffImg:          pathToURLConverter(d.PixelDiffFilePath),
				TopImg:           pathToURLConverter(full[t.Digest]),
				LeftImg:          pathToURLConverter(full[l.Digest]),
			})
		}
		grid = append(grid, row)
	}

	p := PolyTestGUI{
		Top:       topDigests,
		Left:      leftDigests,
		Grid:      grid,
		TopTotal:  topTotal,
		LeftTotal: leftTotal,
	}
	if len(p.Top) == 0 || len(p.Left) == 0 {
		p.Message = "Failed to find images that match those filters."
	}
	w.Header().Set("Content-Type", "application/json")
	enc := json.NewEncoder(w)
	if err := enc.Encode(p); err != nil {
		glog.Errorf("Failed to write or encode result: %s", err)
	}
}
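A sketch of building the request body the doc comment describes. The struct below is a hypothetical mirror of PolyTestRequest, inferred from the fields the handler reads (TopFilter, TopQuery, TopN, ...); the exact JSON tags are an assumption:

package main

import (
	"encoding/json"
	"fmt"
)

// polyTestRequest is a hypothetical client-side mirror of PolyTestRequest;
// field names and JSON tags are assumptions based on the handler above.
type polyTestRequest struct {
	Test               string `json:"test"`
	TopFilter          string `json:"topFilter"`
	LeftFilter         string `json:"leftFilter"`
	TopQuery           string `json:"topQuery"`
	LeftQuery          string `json:"leftQuery"`
	TopIncludeIgnores  bool   `json:"topIncludeIgnores"`
	LeftIncludeIgnores bool   `json:"leftIncludeIgnores"`
	TopN               int    `json:"topN"`
	LeftN              int    `json:"leftN"`
	Head               bool   `json:"head"`
}

func main() {
	b, _ := json.MarshalIndent(polyTestRequest{
		Test:      "some_test",
		TopFilter: "untriaged",
		TopN:      10,
		Head:      true,
	}, "", "  ")
	fmt.Println(string(b))
}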
Example #6
// searchByIssue searches across the given issue.
func searchByIssue(issue string, q *Query, exp *expstorage.Expectations, parsedQuery url.Values, storages *storage.Storage, tile *tiling.Tile, tallies *tally.Tallies, tileParamSet *paramsets.Summary) ([]*Digest, error) {
	trybotResults, err := storages.TrybotResults.Get(issue)
	if err != nil {
		return nil, err
	}

	// Get a matcher for the ignore rules if we filter ignores.
	var ignoreMatcher ignore.RuleMatcher
	if !q.IncludeIgnores {
		ignoreMatcher, err = storages.IgnoreStore.BuildRuleMatcher()
		if err != nil {
			return nil, fmt.Errorf("Unable to build rules matcher: %s", err)
		}
	}

	// Set up a rule to match the query.
	var queryRule ignore.QueryRule
	if len(parsedQuery) > 0 {
		queryRule = ignore.NewQueryRule(parsedQuery)
	}

	// Aggregate the results into an intermediate representation to avoid
	// passing over the dataset twice.
	inter := map[string]*issueIntermediate{}
	talliesByTest := tallies.ByTest()

	for _, bot := range trybotResults.Bots {
		for _, result := range bot.TestResults {
			expandedParams := util.CopyStringMap(bot.BotParams)
			util.AddParams(expandedParams, result.Params)

			if ignoreMatcher != nil {
				if _, ok := ignoreMatcher(expandedParams); ok {
					continue
				}
			}

			if queryRule == nil || queryRule.IsMatch(expandedParams) {
				testName := expandedParams[types.PRIMARY_KEY_FIELD]
				digest := trybotResults.Digests[result.DigestIdx]
				key := testName + ":" + digest
				if !q.IncludeMaster {
					if _, ok := talliesByTest[testName][digest]; ok {
						continue
					}
				}
				if found, ok := inter[key]; ok {
					found.add(expandedParams)
				} else if cl := exp.Classification(testName, digest); !q.excludeClassification(cl) {
					inter[key] = newIssueIntermediate(expandedParams, digest, cl)
				}
			}
		}
	}

	// Build the output and make sure the digests are cached on disk.
	digests := make(map[string]bool, len(inter))
	ret := make([]*Digest, 0, len(inter))
	emptyTraces := &Traces{}
	for _, i := range inter {
		ret = append(ret, &Digest{
			Test:     i.test,
			Digest:   i.digest,
			Status:   i.status.String(),
			ParamSet: i.paramSet,
			Diff:     buildDiff(i.test, i.digest, exp, tile, talliesByTest, nil, storages.DiffStore, tileParamSet, q.IncludeIgnores),
			Traces:   emptyTraces,
		})
		digests[i.digest] = true
	}

	// This ensures that all digests are cached on disk.
	storages.DiffStore.AbsPath(util.KeysOfStringSet(digests))
	return ret, nil
}
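The inter map keyed on test:digest is a single-pass dedup-and-merge. A self-contained sketch of that shape, with hypothetical result records standing in for the trybot data:

package main

import "fmt"

type result struct{ test, digest, config string }

type intermediate struct {
	test, digest string
	configs      map[string]bool
}

// aggregate merges results that share a test/digest pair in one pass, the
// same shape as searchByIssue's inter map.
func aggregate(results []result) map[string]*intermediate {
	inter := map[string]*intermediate{}
	for _, r := range results {
		key := r.test + ":" + r.digest
		if found, ok := inter[key]; ok {
			found.configs[r.config] = true
		} else {
			inter[key] = &intermediate{
				test:    r.test,
				digest:  r.digest,
				configs: map[string]bool{r.config: true},
			}
		}
	}
	return inter
}

func main() {
	rs := []result{{"t", "d1", "8888"}, {"t", "d1", "gpu"}, {"t", "d2", "8888"}}
	fmt.Println(len(aggregate(rs))) // 2
}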