// See ExpectationsStore interface.
func (s *SQLExpectationsStore) Get() (exp *Expectations, err error) {
	// Load the newest record from the database.
	const stmt = `SELECT t1.name, t1.digest, t1.label
	              FROM exp_test_change AS t1
	              JOIN (
	                  SELECT name, digest, MAX(changeid) AS changeid
	                  FROM exp_test_change
	                  GROUP BY name, digest) AS t2
	              ON (t1.name = t2.name AND t1.digest = t2.digest AND t1.changeid = t2.changeid)
	              WHERE t1.removed IS NULL`

	rows, err := s.vdb.DB.Query(stmt)
	if err != nil {
		return nil, err
	}
	defer util.Close(rows)

	result := map[string]types.TestClassification{}
	for rows.Next() {
		var testName, digest, label string
		if err = rows.Scan(&testName, &digest, &label); err != nil {
			return nil, err
		}
		if _, ok := result[testName]; !ok {
			result[testName] = types.TestClassification(map[string]types.Label{})
		}
		result[testName][digest] = types.LabelFromString(label)
	}

	return &Expectations{
		Tests: result,
	}, nil
}
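For context, the expectation types that Get assembles are assumed to have roughly the following shape. This is a sketch inferred from how they are used above (a digest-to-label map per test), not the authoritative definitions in the types package.

// Sketch only: inferred from the usage in Get above, not the real definitions
// in the types package; the concrete label values are assumptions.
type Label int

const (
	UNTRIAGED Label = iota
	POSITIVE
	NEGATIVE
)

// TestClassification maps an image digest to its triage label.
type TestClassification map[string]Label

// Expectations groups digest classifications by test name.
type Expectations struct {
	Tests map[string]TestClassification
}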
Example #2
// polyTriageHandler handles a request to change the triage status of one or more
// digests of one test.
//
// It accepts a POST'd JSON serialization of PolyTriageRequest and updates
// the expectations.
func polyTriageHandler(w http.ResponseWriter, r *http.Request) {
	req := &PolyTriageRequest{}
	if err := parseJson(r, req); err != nil {
		util.ReportError(w, r, err, "Failed to parse JSON request.")
		return
	}
	glog.Infof("Triage request: %#v", req)
	user := login.LoggedInAs(r)
	if user == "" {
		util.ReportError(w, r, fmt.Errorf("Not logged in."), "You must be logged in to triage.")
		return
	}

	// Build the expectations change request from the list of digests passed in.
	digests := req.Digest

	// Or build the expectations change request from filter, query, and include.
	if req.All {
		exp, err := storages.ExpectationsStore.Get()
		if err != nil {
			util.ReportError(w, r, err, "Failed to load expectations.")
			return
		}
		e := exp.Tests[req.Test]
		ii, _, err := imgInfo(req.Filter, req.Query, req.Test, e, -1, req.Include, false, "", "", req.Head)
		if err != nil {
			util.ReportError(w, r, err, "Failed to retrieve digests for test.")
			return
		}
		digests = []string{}
		for _, d := range ii {
			digests = append(digests, d.Digest)
		}
	}

	// Label the digests.
	labelledDigests := map[string]types.Label{}
	for _, d := range digests {
		labelledDigests[d] = types.LabelFromString(req.Status)
	}

	tc := map[string]types.TestClassification{
		req.Test: labelledDigests,
	}

	// Update the expectations with the new labels.
	if err := storages.ExpectationsStore.AddChange(tc, user); err != nil {
		util.ReportError(w, r, err, "Failed to store the updated expectations.")
		return
	}

	w.Header().Set("Content-Type", "application/json")
	enc := json.NewEncoder(w)
	if err := enc.Encode(map[string]string{}); err != nil {
		glog.Errorf("Failed to write or encode result: %s", err)
	}
}
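The handler above only shows how the request is consumed. A plausible shape for PolyTriageRequest, inferred from the fields it accesses (Test, Digest, Status, All, Filter, Query, Include, Head), is sketched below; the JSON tags are illustrative assumptions, not the real definition.

// Sketch of the POSTed triage request, inferred from how req is used in
// polyTriageHandler; the JSON tags are assumptions.
type PolyTriageRequest struct {
	Test    string   `json:"test"`    // test whose digests are being triaged
	Digest  []string `json:"digest"`  // explicit digests to triage
	Status  string   `json:"status"`  // label to apply, parsed via types.LabelFromString
	All     bool     `json:"all"`     // triage every digest matched by the query below
	Filter  string   `json:"filter"`  // triage-status filter passed to imgInfo
	Query   string   `json:"query"`   // URL-encoded trace query
	Include bool     `json:"include"` // include ignored traces
	Head    bool     `json:"head"`    // only consider digests present at head
}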
// getExpectationsAt returns the changes that are necessary to restore the values
// at the given triage change.
func (s *SQLExpectationsStore) getExpectationsAt(changeInfo *TriageLogEntry) (map[string]types.TestClassification, error) {
	const stmtTmpl = `SELECT * FROM (
	                      SELECT tc.name AS name, tc.digest AS digest, tc.label AS label
	                      FROM exp_change AS ec, exp_test_change AS tc
	                      WHERE ((tc.removed IS NULL) OR ((tc.removed IS NOT NULL) AND (tc.removed > ?))) AND
	                            (ec.ts < ?) AND
	                            (ec.id = tc.changeid) AND
	                            ((tc.name, tc.digest) IN (%s))
	                      ORDER BY ec.ts DESC) AS T
	                  GROUP BY name, digest`

	if len(changeInfo.Details) == 0 {
		return map[string]types.TestClassification{}, nil
	}

	// Extract the digests we are interested in, defaulting each to UNTRIAGED.
	ret := map[string]types.TestClassification{}
	listArgs := []interface{}{changeInfo.TS, changeInfo.TS}
	placeHolders := []string{}
	for _, d := range changeInfo.Details {
		if _, ok := ret[d.TestName]; !ok {
			ret[d.TestName] = map[string]types.Label{}
		}
		ret[d.TestName][d.Digest] = types.UNTRIAGED
		listArgs = append(listArgs, d.TestName, d.Digest)
		placeHolders = append(placeHolders, "(?,?)")
	}

	// Add the necessary amount of placeholders to the SQL query.
	stmt := fmt.Sprintf(stmtTmpl, strings.Join(placeHolders, ","))

	// Fetch the records we are interested in.
	rows, err := s.vdb.DB.Query(stmt, listArgs...)
	if err != nil {
		return nil, err
	}
	defer util.Close(rows)

	var name, digest, label string
	for rows.Next() {
		if err = rows.Scan(&name, &digest, &label); err != nil {
			return nil, err
		}
		ret[name][digest] = types.LabelFromString(label)
	}

	return ret, nil
}
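getExpectationsAt only reads a few fields from the triage log entry. A minimal sketch of those fields, as implied by the code above, is shown below; the real TriageLogEntry carries more information.

// Sketch only: the triage-log fields getExpectationsAt relies on, inferred
// from the code above. TS is assumed to be a timestamp in the same units as
// the exp_change.ts column.
type TriageDetail struct {
	TestName string
	Digest   string
}

type TriageLogEntry struct {
	TS      int64
	Details []*TriageDetail
}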
Example #4
// imgInfo returns a populated slice of PolyTestImgInfo based on the filter and
// queryString passed in.
//
// max may be set to -1, which means the returned digest slice is not truncated.
// If sortAgainstHash is true then the results are sorted by their diff against the given 'digest' in direction 'dir';
// otherwise the results are sorted by ascending N.
//
// If head is true then only return digests that appear at head.
func imgInfo(filter, queryString, testName string, e types.TestClassification, max int, includeIgnores bool, sortAgainstHash bool, dir string, digest string, head bool) ([]*PolyTestImgInfo, int, error) {
	query, err := url.ParseQuery(queryString)
	if err != nil {
		return nil, 0, fmt.Errorf("Failed to parse Query in imgInfo: %s", err)
	}
	query[types.PRIMARY_KEY_FIELD] = []string{testName}

	t := timer.New("finding digests")
	digests := map[string]int{}
	if head {
		tile, err := storages.GetLastTileTrimmed(includeIgnores)
		if err != nil {
			return nil, 0, fmt.Errorf("Failed to retrieve tallies in imgInfo: %s", err)
		}
		lastCommitIndex := tile.LastCommitIndex()
		for _, tr := range tile.Traces {
			if tiling.Matches(tr, query) {
				for i := lastCommitIndex; i >= 0; i-- {
					if tr.IsMissing(i) {
						continue
					} else {
						digests[tr.(*types.GoldenTrace).Values[i]] = 1
						break
					}
				}
			}
		}
	} else {
		digests, err = tallies.ByQuery(query, includeIgnores)
		if err != nil {
			return nil, 0, fmt.Errorf("Failed to retrieve tallies in imgInfo: %s", err)
		}
	}
	glog.Infof("Num Digests: %d", len(digests))
	t.Stop()

	// If we are going to sort against a digest then we need to calculate
	// the diff metrics against that digest.
	diffMetrics := map[string]*diff.DiffMetrics{}
	if sortAgainstHash {
		digestSlice := make([]string, 0, len(digests))
		for d := range digests {
			digestSlice = append(digestSlice, d)
		}
		var err error
		diffMetrics, err = storages.DiffStore.Get(digest, digestSlice)
		if err != nil {
			return nil, 0, fmt.Errorf("Failed to calculate diffs to sort against: %s", err)
		}
	}

	label := types.LabelFromString(filter)
	// Filter the digests by their expectation status.
	t = timer.New("apply expectations")
	ret := []*PolyTestImgInfo{}
	for digest, n := range digests {
		if e[digest] != label {
			continue
		}
		p := &PolyTestImgInfo{
			Digest: digest,
			N:      n,
		}
		if sortAgainstHash {
			p.PixelDiffPercent = diffMetrics[digest].PixelDiffPercent
		}
		ret = append(ret, p)
	}
	t.Stop()

	if sortAgainstHash {
		if dir == "asc" {
			sort.Sort(PolyTestImgInfoDiffAscSlice(ret))
		} else {
			sort.Sort(sort.Reverse(PolyTestImgInfoDiffAscSlice(ret)))
		}
	} else {
		sort.Sort(PolyTestImgInfoSlice(ret))
	}

	total := len(ret)
	if max > 0 && len(ret) > max {
		ret = ret[:max]
	}
	return ret, total, nil
}
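imgInfo returns *PolyTestImgInfo values and relies on two sort wrappers. A sketch consistent with the fields and names used above might look like the following; the exact numeric types and sort directions belong to the real definitions.

// Sketch only: inferred from the fields and sort types referenced in imgInfo;
// the exact type of PixelDiffPercent and the sort direction of
// PolyTestImgInfoSlice are assumptions.
type PolyTestImgInfo struct {
	Digest           string
	N                int     // tally count for this digest
	PixelDiffPercent float32 // only populated when sortAgainstHash is true
}

// PolyTestImgInfoDiffAscSlice sorts by ascending pixel diff percent.
type PolyTestImgInfoDiffAscSlice []*PolyTestImgInfo

func (p PolyTestImgInfoDiffAscSlice) Len() int      { return len(p) }
func (p PolyTestImgInfoDiffAscSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p PolyTestImgInfoDiffAscSlice) Less(i, j int) bool {
	return p[i].PixelDiffPercent < p[j].PixelDiffPercent
}

// PolyTestImgInfoSlice sorts by N, per the doc comment on imgInfo.
type PolyTestImgInfoSlice []*PolyTestImgInfo

func (p PolyTestImgInfoSlice) Len() int           { return len(p) }
func (p PolyTestImgInfoSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p PolyTestImgInfoSlice) Less(i, j int) bool { return p[i].N < p[j].N }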