// ClosestDigest returns the closest digest of type 'label' to 'digest', or "" if there aren't any positive digests. // // If no digest of type 'label' is found then Closest.Digest is the empty string. func ClosestDigest(test string, digest string, exp *expstorage.Expectations, tallies tally.Tally, diffStore diff.DiffStore, label types.Label) *Closest { ret := newClosest() unavailableDigests := diffStore.UnavailableDigests() if unavailableDigests[digest] { return ret } selected := []string{} for d, _ := range tallies { if !unavailableDigests[d] && (exp.Classification(test, d) == label) { selected = append(selected, d) } } if len(selected) == 0 { return ret } if diffMetrics, err := diffStore.Get(digest, selected); err != nil { glog.Errorf("ClosestDigest: Failed to get diff: %s", err) return ret } else { for digest, diff := range diffMetrics { if delta := combinedDiffMetric(diff.PixelDiffPercent, diff.MaxRGBADiffs); delta < ret.Diff { ret.Digest = digest ret.Diff = delta ret.DiffPixels = diff.PixelDiffPercent ret.MaxRGBA = diff.MaxRGBADiffs } } return ret } }
func buildDetailsGUI(tile *tiling.Tile, exp *expstorage.Expectations, test string, top string, left string, graphs bool, closest bool, includeIgnores bool) *PolyDetailsGUI { ret := &PolyDetailsGUI{ TopStatus: exp.Classification(test, top).String(), LeftStatus: exp.Classification(test, left).String(), Params: []*PerParamCompare{}, Traces: []*Trace{}, TileSize: len(tile.Commits), } topParamSet := paramsetSum.Get(test, top, includeIgnores) leftParamSet := paramsetSum.Get(test, left, includeIgnores) traceNames := []string{} tally := tallies.ByTrace() for id, tr := range tile.Traces { if tr.Params()[types.PRIMARY_KEY_FIELD] == test { traceNames = append(traceNames, id) } } keys := util.UnionStrings(util.KeysOfParamSet(topParamSet), util.KeysOfParamSet(leftParamSet)) sort.Strings(keys) for _, k := range keys { ret.Params = append(ret.Params, &PerParamCompare{ Name: k, Top: safeGet(topParamSet, k), Left: safeGet(leftParamSet, k), }) } // Now build the trace data. if graphs { ret.Traces, ret.OtherDigests = buildTraceData(top, traceNames, tile, tally, exp) ret.Commits = tile.Commits ret.Blame = blamer.GetBlame(test, top, ret.Commits) } // Now find the closest positive and negative digests. t := tallies.ByTest()[test] if closest && t != nil { ret.PosClosest = digesttools.ClosestDigest(test, top, exp, t, storages.DiffStore, types.POSITIVE) ret.NegClosest = digesttools.ClosestDigest(test, top, exp, t, storages.DiffStore, types.NEGATIVE) } if top == left { var err error // Search is only done on the digest. Codesite can't seem to extract the // name of the test reliably from the URL in comment text, yet can get the // digest just fine. This issue should be revisited once we switch to // Monorail. ret.Issues, err = issueTracker.FromQuery(top) if err != nil { glog.Errorf("Failed to load issues for [%s, %s]: %s", test, top, err) } } return ret }
func digestFromIntermediate(test, digest string, inter *intermediate, e *expstorage.Expectations, tile *tiling.Tile, tallies *tally.Tallies, blamer *blame.Blamer, diffStore diff.DiffStore, paramset *paramsets.Summary, includeIgnores bool) *Digest { traceTally := tallies.ByTrace() ret := &Digest{ Test: test, Digest: digest, Status: e.Classification(test, digest).String(), Traces: buildTraces(test, digest, inter, e, tile, traceTally, paramset, includeIgnores), Diff: buildDiff(test, digest, inter, e, tile, tallies.ByTest(), blamer, diffStore, paramset, includeIgnores), } return ret }
// searchTile queries across a tile. func searchTile(q *Query, e *expstorage.Expectations, parsedQuery url.Values, storages *storage.Storage, tile *tiling.Tile, tallies *tally.Tallies, blamer *blame.Blamer, paramset *paramsets.Summary) ([]*Digest, []*tiling.Commit, error) { // TODO Use CommitRange to create a trimmed tile. traceTally := tallies.ByTrace() lastCommitIndex := tile.LastCommitIndex() // Loop over the tile and pull out all the digests that match // the query, collecting the matching traces as you go. Build // up a set of intermediate's that can then be used to calculate // Digest's. // map [test:digest] *intermediate inter := map[string]*intermediate{} for id, tr := range tile.Traces { if tiling.Matches(tr, parsedQuery) { test := tr.Params()[types.PRIMARY_KEY_FIELD] // Get all the digests digests := digestsFromTrace(id, tr, q.Head, lastCommitIndex, traceTally) for _, digest := range digests { cl := e.Classification(test, digest) if q.excludeClassification(cl) { continue } // Fix blamer to make this easier. if q.BlameGroupID != "" { if cl == types.UNTRIAGED { b := blamer.GetBlame(test, digest, tile.Commits) if q.BlameGroupID != blameGroupID(b, tile.Commits) { continue } } else { continue } } key := fmt.Sprintf("%s:%s", test, digest) if i, ok := inter[key]; !ok { inter[key] = newIntermediate(test, digest, id, tr, digests) } else { i.addTrace(id, tr, digests) } } } } // Now loop over all the intermediates and build a Digest for each one. ret := make([]*Digest, 0, len(inter)) for key, i := range inter { parts := strings.Split(key, ":") ret = append(ret, digestFromIntermediate(parts[0], parts[1], i, e, tile, tallies, blamer, storages.DiffStore, paramset, q.IncludeIgnores)) } return ret, tile.Commits, nil }
// searchByIssue searches across the given issue. func searchByIssue(issue string, q *Query, exp *expstorage.Expectations, parsedQuery url.Values, storages *storage.Storage, tile *tiling.Tile, tallies *tally.Tallies, tileParamSet *paramsets.Summary) ([]*Digest, error) { trybotResults, err := storages.TrybotResults.Get(issue) if err != nil { return nil, err } if !q.IncludeIgnores { matcher, err := storages.IgnoreStore.BuildRuleMatcher() if err != nil { return nil, fmt.Errorf("Unable to build rules matcher: %s", err) } for k, v := range trybotResults { if _, ok := matcher(v.Params); ok { delete(trybotResults, k) } } } rule := ignore.NewQueryRule(parsedQuery) // Aggregate the results into an intermediate representation to avoid // passing over the dataset twice. inter := map[string]*issueIntermediate{} for _, tbr := range trybotResults { if rule.IsMatch(tbr.Params) { key := tbr.Test + ":" + tbr.Digest if found, ok := inter[key]; ok { found.add(tbr) } else if cl := exp.Classification(tbr.Test, tbr.Digest); !q.excludeClassification(cl) { inter[key] = newIssueIntermediate(tbr, cl) } } } // Build the output. talliesByTest := tallies.ByTest() ret := make([]*Digest, 0, len(inter)) for _, i := range inter { ret = append(ret, &Digest{ Test: i.test, Digest: i.digest, Status: i.status.String(), ParamSet: i.paramSet, Diff: buildDiff(i.test, i.digest, exp, tile, talliesByTest, nil, storages.DiffStore, tileParamSet, q.IncludeIgnores), }) } return ret, nil }
// buildTraces returns a Trace for the given intermediate. func buildTraces(test, digest string, inter *intermediate, e *expstorage.Expectations, tile *tiling.Tile, traceTally map[string]tally.Tally) *Traces { traceNames := make([]string, 0, len(inter.Traces)) for id, _ := range inter.Traces { traceNames = append(traceNames, id) } ret := &Traces{ TileSize: len(tile.Commits), Traces: []Trace{}, Digests: []DigestStatus{}, } sort.Strings(traceNames) last := tile.LastCommitIndex() y := 0 if len(traceNames) > 0 { ret.Digests = append(ret.Digests, DigestStatus{ Digest: digest, Status: e.Classification(test, digest).String(), }) } for _, id := range traceNames { t, ok := traceTally[id] if !ok { continue } if count, ok := t[digest]; !ok || count == 0 { continue } trace := inter.Traces[id].(*types.GoldenTrace) p := Trace{ Data: []Point{}, ID: id, Params: trace.Params(), } for i := last; i >= 0; i-- { if trace.IsMissing(i) { continue } // s is the status of the digest, it is either 0 for a match, or [1-8] if not. s := 0 if trace.Values[i] != digest { if index := digestIndex(trace.Values[i], ret.Digests); index != -1 { s = index } else { if len(ret.Digests) < 9 { d := trace.Values[i] ret.Digests = append(ret.Digests, DigestStatus{ Digest: d, Status: e.Classification(test, d).String(), }) s = len(ret.Digests) - 1 } else { s = 8 } } } p.Data = append(p.Data, Point{ X: i, Y: y, S: s, }) } sort.Sort(PointSlice(p.Data)) ret.Traces = append(ret.Traces, p) y += 1 } return ret }
// buildTraceData returns a populated []*Trace for all the traces that contain 'digest'. func buildTraceData(digest string, traceNames []string, tile *tiling.Tile, traceTally map[string]tally.Tally, exp *expstorage.Expectations) ([]*Trace, []*DigestStatus) { sort.Strings(traceNames) ret := []*Trace{} last := tile.LastCommitIndex() y := 0 // Keep track of the first 7 non-matching digests we encounter so we can color them differently. otherDigests := []*DigestStatus{} // Populate otherDigests with all the digests, including the one we are comparing against. if len(traceNames) > 0 { // Find the test name so we can look up the triage status. trace := tile.Traces[traceNames[0]].(*types.GoldenTrace) test := trace.Params()[types.PRIMARY_KEY_FIELD] otherDigests = append(otherDigests, &DigestStatus{ Digest: digest, Status: exp.Classification(test, digest).String(), }) } for _, id := range traceNames { t, ok := traceTally[id] if !ok { continue } if count, ok := t[digest]; !ok || count == 0 { continue } trace := tile.Traces[id].(*types.GoldenTrace) p := &Trace{ Data: []Point{}, Label: id, Params: trace.Params(), } for i := last; i >= 0; i-- { if trace.IsMissing(i) { continue } // s is the status of the digest, it is either 0 for a match, or [1-8] if not. s := 0 if trace.Values[i] != digest { if index := digestIndex(trace.Values[i], otherDigests); index != -1 { s = index } else { if len(otherDigests) < 9 { d := trace.Values[i] test := trace.Params()[types.PRIMARY_KEY_FIELD] otherDigests = append(otherDigests, &DigestStatus{ Digest: d, Status: exp.Classification(test, d).String(), }) s = len(otherDigests) - 1 } else { s = 8 } } } p.Data = append(p.Data, Point{ X: i, Y: y, S: s, }) } sort.Sort(PointSlice(p.Data)) ret = append(ret, p) y += 1 } return ret, otherDigests }
// searchByIssue searches across the given issue.
//
// Trybot results for 'issue' are filtered by the ignore rules (unless
// q.IncludeIgnores is set), matched against the parsed query, optionally
// restricted to digests not already tallied on master (q.IncludeMaster),
// aggregated per test/digest and returned as Digests.
func searchByIssue(issue string, q *Query, exp *expstorage.Expectations, parsedQuery url.Values, storages *storage.Storage, tile *tiling.Tile, tallies *tally.Tallies, tileParamSet *paramsets.Summary) ([]*Digest, error) {
	trybotResults, err := storages.TrybotResults.Get(issue)
	if err != nil {
		return nil, err
	}

	// Get a matcher for the ignore rules if we filter ignores.
	var ignoreMatcher ignore.RuleMatcher = nil
	if !q.IncludeIgnores {
		ignoreMatcher, err = storages.IgnoreStore.BuildRuleMatcher()
		if err != nil {
			return nil, fmt.Errorf("Unable to build rules matcher: %s", err)
		}
	}

	// Set up a rule to match the query.
	var queryRule ignore.QueryRule = nil
	if len(parsedQuery) > 0 {
		queryRule = ignore.NewQueryRule(parsedQuery)
	}

	// Aggregate the results into an intermediate representation to avoid
	// passing over the dataset twice.
	inter := map[string]*issueIntermediate{}
	talliesByTest := tallies.ByTest()
	for _, bot := range trybotResults.Bots {
		for _, result := range bot.TestResults {
			// Bot-level params plus per-result params form the full param set
			// used for matching.
			expandedParams := util.CopyStringMap(bot.BotParams)
			util.AddParams(expandedParams, result.Params)

			// Drop results covered by an ignore rule.
			if ignoreMatcher != nil {
				if _, ok := ignoreMatcher(expandedParams); ok {
					continue
				}
			}

			// A nil queryRule (empty query) matches everything.
			if (queryRule == nil) || queryRule.IsMatch(expandedParams) {
				testName := expandedParams[types.PRIMARY_KEY_FIELD]
				digest := trybotResults.Digests[result.DigestIdx]
				key := testName + ":" + digest
				// When IncludeMaster is false, skip digests that already
				// appear in the master tile's tallies for this test.
				if !q.IncludeMaster {
					if _, ok := talliesByTest[testName][digest]; ok {
						continue
					}
				}
				if found, ok := inter[key]; ok {
					found.add(expandedParams)
				} else if cl := exp.Classification(testName, digest); !q.excludeClassification(cl) {
					inter[key] = newIssueIntermediate(expandedParams, digest, cl)
				}
			}
		}
	}

	// Build the output and make sure the digest are cached on disk.
	digests := make(map[string]bool, len(inter))
	ret := make([]*Digest, 0, len(inter))
	emptyTraces := &Traces{}
	for _, i := range inter {
		ret = append(ret, &Digest{
			Test:     i.test,
			Digest:   i.digest,
			Status:   i.status.String(),
			ParamSet: i.paramSet,
			Diff:     buildDiff(i.test, i.digest, exp, tile, talliesByTest, nil, storages.DiffStore, tileParamSet, q.IncludeIgnores),
			Traces:   emptyTraces,
		})
		digests[i.digest] = true
	}

	// This ensures that all digests are cached on disk.
	storages.DiffStore.AbsPath(util.KeysOfStringSet(digests))

	return ret, nil
}