Example #1
// GCSBasedStable is a version of the Stable function that depends on files stored in GCS instead of Jenkins.
func (e *E2ETester) GCSBasedStable() bool {
	for _, job := range e.JobNames {
		lastBuildNumber, err := utils.GetLastestBuildNumberFromJenkinsGoogleBucket(job)
		glog.V(4).Infof("Checking status of %v, %v", job, lastBuildNumber)
		if err != nil {
			glog.Errorf("Error while getting data for %v: %v", job, err)
			continue
		}
		if !checkFinishedStatus(job, lastBuildNumber) {
			return false
		}
	}

	return true
}
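Both GCSBasedStable above and GCSWeakStable in Example #3 delegate the per-build pass/fail check to a checkFinishedStatus helper that isn't shown on this page. Below is a minimal sketch of what such a helper could look like, assuming each build publishes a finished.json marker whose Result field reads "SUCCESS" on a green run; the file name, JSON schema, and the use of encoding/json are assumptions for illustration, not the confirmed implementation.

// checkFinishedStatus (hypothetical sketch): the real helper is not shown in these examples.
// Assumes each build writes a finished.json marker with a Result field set to "SUCCESS" on success.
func checkFinishedStatus(job string, buildNumber int) bool {
	response, err := utils.GetFileFromJenkinsGoogleBucket(job, buildNumber, "finished.json")
	if err != nil {
		glog.Errorf("Error while getting finished.json for %v/%v: %v", job, buildNumber, err)
		return false
	}
	defer response.Body.Close()
	var finished struct {
		Result string `json:"result"`
	}
	if err := json.NewDecoder(response.Body).Decode(&finished); err != nil {
		glog.Errorf("Failed to decode finished.json for %v/%v: %v", job, buildNumber, err)
		return false
	}
	return finished.Result == "SUCCESS"
}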
Example #2
// getData fetches the recent build logs for jobName from the GCS bucket and aggregates the
// parsed per-test latency data by build number.
func (g *GoogleGCSDownloader) getData() (TestToBuildData, sets.String, sets.String, error) {
	fmt.Print("Getting Data from GCS...\n")
	buildLatency := TestToBuildData{}
	resources := sets.NewString()
	methods := sets.NewString()

	buildNumber := g.startFrom
	lastBuildNo, err := utils.GetLastestBuildNumberFromJenkinsGoogleBucket(jobName)
	if err != nil {
		return buildLatency, resources, methods, err
	}
	// Only look at the most recent 100 builds.
	if buildNumber < lastBuildNo-100 {
		buildNumber = lastBuildNo - 100
	}

	for ; buildNumber <= lastBuildNo; buildNumber++ {
		fmt.Printf("Fetching build %v...\n", buildNumber)
		testDataResponse, err := utils.GetFileFromJenkinsGoogleBucket(jobName, buildNumber, logFile)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error while fetching data: %v\n", err)
			continue
		}

		testDataBody := testDataResponse.Body
		defer testDataBody.Close()
		testDataScanner := bufio.NewScanner(testDataBody)

		hist := parseTestOutput(testDataScanner, buildNumber, resources, methods)

		for k, v := range hist {
			if _, ok := buildLatency[k]; !ok {
				buildLatency[k] = make(BuildLatencyData)
			}
			buildLatency[k][fmt.Sprintf("%d", buildNumber)] = v
		}
	}

	return buildLatency, resources, methods, nil
}
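A hedged usage sketch for the downloader above: the struct literal assumes that startFrom (the only field getData reads) is enough to construct a GoogleGCSDownloader, which may not hold for the real type, and -1 simply means "only the last 100 builds" given the clamp in getData.

// Hypothetical usage: build a downloader and print a summary of the aggregated data.
func main() {
	downloader := &GoogleGCSDownloader{startFrom: -1}
	buildLatency, resources, methods, err := downloader.getData()
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error while getting data from GCS: %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("Collected latency data for %d tests (%d resources, %d methods)\n",
		len(buildLatency), resources.Len(), methods.Len())
}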
Example #3
// GCSWeakStable is a version of GCSBasedStable with a slightly relaxed condition.
// This function reports the e2e's as unstable only if there were real test failures
// (i.e. a test actually failed, so timeouts and cluster startup failures don't count),
// or if the tests failed for any reason three times in a row.
func (e *E2ETester) GCSWeakStable() bool {
	for _, job := range e.WeakStableJobNames {
		lastBuildNumber, err := utils.GetLastestBuildNumberFromJenkinsGoogleBucket(job)
		glog.Infof("Checking status of %v, %v", job, lastBuildNumber)
		if err != nil {
			glog.Errorf("Error while getting data for %v: %v", job, err)
			continue
		}
		if checkFinishedStatus(job, lastBuildNumber) {
			continue
		}

		// If we're here it means that the build failed, so we need to look for the reason
		// by iterating over the junit_XX.xml files and looking for failures.
		i := 0
		for {
			i++
			path := fmt.Sprintf("artifacts/junit_%02d.xml", i)
			response, err := utils.GetFileFromJenkinsGoogleBucket(job, lastBuildNumber, path)
			if err != nil {
				glog.Errorf("Error while getting data for %v/%v/%v: %v", job, lastBuildNumber, path, err)
				continue
			}
			if response.StatusCode != 200 {
				break
			}
			defer response.Body.Close()
			reader := bufio.NewReader(response.Body)
			body, err := reader.ReadString('\n')
			if err != nil {
				glog.Errorf("Failed to read the response for %v/%v/%v: %v", job, lastBuildNumber, path, err)
				continue
			}
			if strings.TrimSpace(body) != expectedXMLHeader {
				glog.Errorf("Invalid header for %v/%v/%v: %v, expected %v", job, lastBuildNumber, path, body, expectedXMLHeader)
				continue
			}
			body, err = reader.ReadString('\n')
			if err != nil {
				glog.Errorf("Failed to read the response for %v/%v/%v: %v", job, lastBuildNumber, path, err)
				continue
			}
			numberOfTests := 0
			numberOfFailures := 0
			timestamp := 0.0
			fmt.Sscanf(strings.TrimSpace(body), "<testsuite tests=\"%d\" failures=\"%d\" time=\"%f\">", &numberOfTests, &numberOfFailures, &timestamp)
			glog.V(4).Infof("%v, numberOfTests: %v, numberOfFailures: %v", body, numberOfTests, numberOfFailures)
			if numberOfFailures > 0 {
				glog.V(4).Infof("Found failure in %v for job %v build number %v", path, job, lastBuildNumber)
				return false
			}
		}

		// If we're here it means that we weren't able to find a failed test, which means
		// that the build failure is coming from the infrastructure.
		// Check the results of the previous two builds.
		if !checkFinishedStatus(job, lastBuildNumber-1) && !checkFinishedStatus(job, lastBuildNumber-2) {
			return false
		}
	}
	return true
}
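The loop above compares each junit_XX.xml header against an expectedXMLHeader constant that isn't shown, and then pulls the counts out of the <testsuite> line with fmt.Sscanf. Here is a small self-contained sketch of both; the constant's exact value is an assumption (the standard XML declaration) and the real one may differ.

// Assumed value: the XML declaration that junit_XX.xml files are expected to start with.
const expectedXMLHeader = `<?xml version="1.0" encoding="UTF-8"?>`

// Demonstrates how the Sscanf format used above extracts the counts from a <testsuite> line.
// For example, parseTestsuiteLine(`<testsuite tests="153" failures="2" time="1287.6">`)
// returns 153, 2, 1287.6.
func parseTestsuiteLine(line string) (tests, failures int, duration float64) {
	fmt.Sscanf(strings.TrimSpace(line), "<testsuite tests=\"%d\" failures=\"%d\" time=\"%f\">", &tests, &failures, &duration)
	return tests, failures, duration
}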