// digestFromIntermediate converts an intermediate search result for the
// given test/digest pair into a Digest, filling in trace and diff data.
func digestFromIntermediate(test, digest string, inter *intermediate, e *expstorage.Expectations, tile *tiling.Tile, tallies *tally.Tallies, blamer *blame.Blamer, diffStore diff.DiffStore, paramset *paramsets.Summary, includeIgnores bool) *Digest {
	traceTally := tallies.ByTrace()
	ret := &Digest{
		Test:   test,
		Digest: digest,
		Status: e.Classification(test, digest).String(),
		Traces: buildTraces(test, digest, inter, e, tile, traceTally, paramset, includeIgnores),
		Diff:   buildDiff(test, digest, inter, e, tile, tallies.ByTest(), blamer, diffStore, paramset, includeIgnores),
	}
	return ret
}
// searchByIssue searches across the given issue.
func searchByIssue(issue string, q *Query, exp *expstorage.Expectations, parsedQuery url.Values, storages *storage.Storage, tile *tiling.Tile, tallies *tally.Tallies, tileParamSet *paramsets.Summary) ([]*Digest, error) {
	trybotResults, err := storages.TrybotResults.Get(issue)
	if err != nil {
		return nil, err
	}

	// Drop any trybot results that match an ignore rule.
	if !q.IncludeIgnores {
		matcher, err := storages.IgnoreStore.BuildRuleMatcher()
		if err != nil {
			return nil, fmt.Errorf("Unable to build rules matcher: %s", err)
		}
		for k, v := range trybotResults {
			if _, ok := matcher(v.Params); ok {
				delete(trybotResults, k)
			}
		}
	}

	rule := ignore.NewQueryRule(parsedQuery)

	// Aggregate the results into an intermediate representation to avoid
	// passing over the dataset twice.
	inter := map[string]*issueIntermediate{}
	for _, tbr := range trybotResults {
		if rule.IsMatch(tbr.Params) {
			key := tbr.Test + ":" + tbr.Digest
			if found, ok := inter[key]; ok {
				found.add(tbr)
			} else if cl := exp.Classification(tbr.Test, tbr.Digest); !q.excludeClassification(cl) {
				inter[key] = newIssueIntermediate(tbr, cl)
			}
		}
	}

	// Build the output.
	talliesByTest := tallies.ByTest()
	ret := make([]*Digest, 0, len(inter))
	for _, i := range inter {
		ret = append(ret, &Digest{
			Test:     i.test,
			Digest:   i.digest,
			Status:   i.status.String(),
			ParamSet: i.paramSet,
			Diff:     buildDiff(i.test, i.digest, exp, tile, talliesByTest, nil, storages.DiffStore, tileParamSet, q.IncludeIgnores),
		})
	}

	return ret, nil
}
// Init starts a background goroutine that warms the diff cache. On each
// run it computes the closest positive and negative digests for every
// untriaged digest (which fills the filediffstore cache as a side effect)
// and pre-fetches all images referenced by the current tile.
func Init(storages *storage.Storage, summaries *summary.Summaries, tallies *tally.Tallies) error {
	exp, err := storages.ExpectationsStore.Get()
	if err != nil {
		return err
	}
	go func() {
		oneRun := func() {
			t := timer.New("warmer one loop")
			for test, sum := range summaries.Get() {
				for _, digest := range sum.UntHashes {
					testTally := tallies.ByTest()[test]
					if testTally != nil {
						// Calculate the closest digest for the side effect of
						// filling in the filediffstore cache.
						digesttools.ClosestDigest(test, digest, exp, testTally, storages.DiffStore, types.POSITIVE)
						digesttools.ClosestDigest(test, digest, exp, testTally, storages.DiffStore, types.NEGATIVE)
					}
				}
			}
			t.Stop()

			// Refresh the expectations for the next run.
			if newExp, err := storages.ExpectationsStore.Get(); err != nil {
				glog.Errorf("warmer: Failed to get expectations: %s", err)
			} else {
				exp = newExp
			}

			// Make sure all images are downloaded. This is necessary because
			// the front-end doesn't get URLs (generated via DiffStore.AbsPath),
			// which ensures that the image has been downloaded.
			// TODO(stephana): Remove this once the new diffstore is in place.
			tile, err := storages.GetLastTileTrimmed(true)
			if err != nil {
				glog.Errorf("Error retrieving tile: %s", err)
				// Skip the rest of this run; tile is nil on error.
				return
			}
			tileLen := tile.LastCommitIndex() + 1
			traceDigests := make(map[string]bool, tileLen)
			for _, trace := range tile.Traces {
				gTrace := trace.(*types.GoldenTrace)
				for _, digest := range gTrace.Values {
					if digest != types.MISSING_DIGEST {
						traceDigests[digest] = true
					}
				}
			}

			digests := util.KeysOfStringSet(traceDigests)
			glog.Infof("FOUND %d digests to fetch.", len(digests))
			storages.DiffStore.AbsPath(digests)

			if err := warmTrybotDigests(storages, traceDigests); err != nil {
				glog.Errorf("Error retrieving trybot digests: %s", err)
				return
			}
		}

		oneRun()
		for range time.Tick(time.Minute) {
			oneRun()
		}
	}()
	return nil
}
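// A minimal usage sketch, not part of the original code: Init is meant to be
// called once at server startup, after storages, summaries, and tallies have
// been constructed elsewhere. The package name "warmer" is an assumption
// taken from the log prefix above.
//
//	if err := warmer.Init(storages, summaries, tallies); err != nil {
//		glog.Fatalf("Failed to start warmer: %s", err)
//	}
//
// Init only fails if the initial expectations fetch fails; the warming loop
// itself runs in a background goroutine and logs its own errors.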
// searchByIssue searches across the given issue.
func searchByIssue(issue string, q *Query, exp *expstorage.Expectations, parsedQuery url.Values, storages *storage.Storage, tile *tiling.Tile, tallies *tally.Tallies, tileParamSet *paramsets.Summary) ([]*Digest, error) {
	trybotResults, err := storages.TrybotResults.Get(issue)
	if err != nil {
		return nil, err
	}

	// Get a matcher for the ignore rules if we filter ignores.
	var ignoreMatcher ignore.RuleMatcher
	if !q.IncludeIgnores {
		ignoreMatcher, err = storages.IgnoreStore.BuildRuleMatcher()
		if err != nil {
			return nil, fmt.Errorf("Unable to build rules matcher: %s", err)
		}
	}

	// Set up a rule to match the query.
	var queryRule ignore.QueryRule
	if len(parsedQuery) > 0 {
		queryRule = ignore.NewQueryRule(parsedQuery)
	}

	// Aggregate the results into an intermediate representation to avoid
	// passing over the dataset twice.
	inter := map[string]*issueIntermediate{}
	talliesByTest := tallies.ByTest()
	for _, bot := range trybotResults.Bots {
		for _, result := range bot.TestResults {
			expandedParams := util.CopyStringMap(bot.BotParams)
			util.AddParams(expandedParams, result.Params)

			if ignoreMatcher != nil {
				if _, ok := ignoreMatcher(expandedParams); ok {
					continue
				}
			}

			if (queryRule == nil) || queryRule.IsMatch(expandedParams) {
				testName := expandedParams[types.PRIMARY_KEY_FIELD]
				digest := trybotResults.Digests[result.DigestIdx]
				key := testName + ":" + digest

				// Unless master results are included, skip digests that
				// already appear in the tile.
				if !q.IncludeMaster {
					if _, ok := talliesByTest[testName][digest]; ok {
						continue
					}
				}

				if found, ok := inter[key]; ok {
					found.add(expandedParams)
				} else if cl := exp.Classification(testName, digest); !q.excludeClassification(cl) {
					inter[key] = newIssueIntermediate(expandedParams, digest, cl)
				}
			}
		}
	}

	// Build the output and make sure the digests are cached on disk.
	digests := make(map[string]bool, len(inter))
	ret := make([]*Digest, 0, len(inter))
	emptyTraces := &Traces{}
	for _, i := range inter {
		ret = append(ret, &Digest{
			Test:     i.test,
			Digest:   i.digest,
			Status:   i.status.String(),
			ParamSet: i.paramSet,
			Diff:     buildDiff(i.test, i.digest, exp, tile, talliesByTest, nil, storages.DiffStore, tileParamSet, q.IncludeIgnores),
			Traces:   emptyTraces,
		})
		digests[i.digest] = true
	}

	// This ensures that all digests are cached on disk.
	storages.DiffStore.AbsPath(util.KeysOfStringSet(digests))

	return ret, nil
}
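// A minimal calling sketch, not part of the original code: the issue ID and
// query values below are placeholders, and the surrounding plumbing (how q,
// exp, storages, tile, tallies, and tileParamSet are obtained) is assumed to
// live in the HTTP handler that parses the search request.
//
//	parsedQuery := url.Values{"source_type": {"gm"}}
//	digests, err := searchByIssue("123456", q, exp, parsedQuery, storages, tile, tallies, tileParamSet)
//	if err != nil {
//		return nil, err
//	}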