func (w *worker) work(pfsClient pfs.APIClient, ppsClient pps.APIClient) error { opt := w.rand.Float64() switch { case opt < repo: repoName := w.name() if err := pfsutil.CreateRepo(pfsClient, repoName); err != nil { return err } w.repos = append(w.repos, &pfs.Repo{Name: repoName}) commit, err := pfsutil.StartCommit(pfsClient, repoName, "") if err != nil { return err } w.started = append(w.started, commit) case opt < commit: if len(w.started) >= maxStartedCommits { i := w.rand.Intn(len(w.started)) commit := w.started[i] if err := pfsutil.FinishCommit(pfsClient, commit.Repo.Name, commit.Id); err != nil { return err } w.started = append(w.started[:i], w.started[i+1:]...) w.finished = append(w.finished, commit) } else { commit := w.finished[w.rand.Intn(len(w.finished))] commit, err := pfsutil.StartCommit(pfsClient, commit.Repo.Name, commit.Id) if err != nil { return err } w.started = append(w.started, commit) } case opt < file: commit := w.started[w.rand.Intn(len(w.started))] if _, err := pfsutil.PutFile(pfsClient, commit.Repo.Name, commit.Id, w.name(), 0, w.reader()); err != nil { return err } case opt < job: inputs := [5]string{} var inputCommits []*pfs.Commit for i := range inputs { randI := w.rand.Intn(len(w.finished)) inputs[i] = w.finished[randI].Repo.Name inputCommits = append(inputCommits, w.finished[randI]) } var parentJobID string if len(w.jobs) > 0 { parentJobID = w.jobs[w.rand.Intn(len(w.jobs))].Id } outFilename := w.name() job, err := ppsutil.CreateJob( ppsClient, "", []string{"sh"}, w.grepCmd(inputs, outFilename), 1, inputCommits, parentJobID, ) if err != nil { return err } w.jobs = append(w.jobs, job) case opt < pipeline: inputs := [5]string{} var inputRepos []*pfs.Repo for i := range inputs { randI := w.rand.Intn(len(w.repos)) inputs[i] = w.repos[randI].Name inputRepos = append(inputRepos, w.repos[randI]) } pipelineName := w.name() outFilename := w.name() if err := ppsutil.CreatePipeline( ppsClient, pipelineName, "", []string{"sh"}, w.grepCmd(inputs, 
outFilename), 1, inputRepos, ); err != nil { return err } w.pipelines = append(w.pipelines, ppsutil.NewPipeline(pipelineName)) } return nil }
// TestPipeline exercises the end-to-end pipeline flow: it creates a data
// repo and a pipeline that copies /pfs/<dataRepo>/file to /pfs/out/file,
// then makes two commits to the data repo and verifies that each commit
// triggers an output commit whose contents reflect the accumulated input.
func TestPipeline(t *testing.T) {
	t.Parallel()
	pachClient := getPachClient(t)
	// create repos
	dataRepo := uniqueString("TestPipeline.data")
	require.NoError(t, pfsutil.CreateRepo(pachClient, dataRepo))
	// create pipeline
	pipelineName := uniqueString("pipeline")
	outRepo := pps.PipelineRepo(ppsutil.NewPipeline(pipelineName))
	require.NoError(t, ppsutil.CreatePipeline(
		pachClient,
		pipelineName,
		"",
		[]string{"cp", path.Join("/pfs", dataRepo, "file"), "/pfs/out/file"},
		nil,
		1,
		[]*pps.PipelineInput{{Repo: &pfs.Repo{Name: dataRepo}}},
	))
	// Do first commit to repo
	commit1, err := pfsutil.StartCommit(pachClient, dataRepo, "")
	require.NoError(t, err)
	_, err = pfsutil.PutFile(pachClient, dataRepo, commit1.Id, "file", 0, strings.NewReader("foo\n"))
	require.NoError(t, err)
	require.NoError(t, pfsutil.FinishCommit(pachClient, dataRepo, commit1.Id))
	// Block until the pipeline produces a readable commit in the output repo.
	listCommitRequest := &pfs.ListCommitRequest{
		Repo:       []*pfs.Repo{outRepo},
		CommitType: pfs.CommitType_COMMIT_TYPE_READ,
		Block:      true,
	}
	listCommitResponse, err := pachClient.ListCommit(
		context.Background(),
		listCommitRequest,
	)
	require.NoError(t, err)
	outCommits := listCommitResponse.CommitInfo
	require.Equal(t, 1, len(outCommits))
	// The output file should contain exactly what was written to the input.
	var buffer bytes.Buffer
	require.NoError(t, pfsutil.GetFile(pachClient, outRepo.Name, outCommits[0].Commit.Id, "file", 0, 0, "", nil, &buffer))
	require.Equal(t, "foo\n", buffer.String())
	// Do second commit to repo
	commit2, err := pfsutil.StartCommit(pachClient, dataRepo, commit1.Id)
	require.NoError(t, err)
	_, err = pfsutil.PutFile(pachClient, dataRepo, commit2.Id, "file", 0, strings.NewReader("bar\n"))
	require.NoError(t, err)
	require.NoError(t, pfsutil.FinishCommit(pachClient, dataRepo, commit2.Id))
	// Block for the next output commit, listing only commits after the first.
	listCommitRequest = &pfs.ListCommitRequest{
		Repo:       []*pfs.Repo{outRepo},
		FromCommit: []*pfs.Commit{outCommits[0].Commit},
		CommitType: pfs.CommitType_COMMIT_TYPE_READ,
		Block:      true,
	}
	listCommitResponse, err = pachClient.ListCommit(
		context.Background(),
		listCommitRequest,
	)
	require.NoError(t, err)
	// The second output commit must be chained off the first one.
	require.NotNil(t, listCommitResponse.CommitInfo[0].ParentCommit)
	require.Equal(t, outCommits[0].Commit.Id, listCommitResponse.CommitInfo[0].ParentCommit.Id)
	outCommits = listCommitResponse.CommitInfo
	require.Equal(t, 1, len(outCommits))
	// NOTE(review): the second output commit is expected to expose both
	// writes ("foo\nbar\n"), i.e. output commits appear to accumulate —
	// consistent with the parent-commit chaining asserted above.
	buffer = bytes.Buffer{}
	require.NoError(t, pfsutil.GetFile(pachClient, outRepo.Name, outCommits[0].Commit.Id, "file", 0, 0, "", nil, &buffer))
	require.Equal(t, "foo\nbar\n", buffer.String())
}
func (w *worker) work(pfsClient pfs.APIClient, ppsClient pps.APIClient) error { opt := w.rand.Float64() switch { case opt < repo: repoName := w.randString(10) if err := pfsutil.CreateRepo(pfsClient, repoName); err != nil { return err } w.repos = append(w.repos, &pfs.Repo{Name: repoName}) commit, err := pfsutil.StartCommit(pfsClient, repoName, "") if err != nil { return err } w.started = append(w.started, commit) case opt < commit: if len(w.started) >= maxStartedCommits || len(w.finished) == 0 { if len(w.started) == 0 { return nil } i := w.rand.Intn(len(w.started)) commit := w.started[i] if err := pfsutil.FinishCommit(pfsClient, commit.Repo.Name, commit.Id); err != nil { return err } w.started = append(w.started[:i], w.started[i+1:]...) w.finished = append(w.finished, commit) } else { if len(w.finished) == 0 { return nil } commit := w.finished[w.rand.Intn(len(w.finished))] commit, err := pfsutil.StartCommit(pfsClient, commit.Repo.Name, commit.Id) if err != nil { return err } w.started = append(w.started, commit) } case opt < file: if len(w.started) == 0 { return nil } commit := w.started[w.rand.Intn(len(w.started))] if _, err := pfsutil.PutFile(pfsClient, commit.Repo.Name, commit.Id, w.randString(10), 0, w.reader()); err != nil { return err } case opt < job: if len(w.startedJobs) >= maxStartedJobs { job := w.startedJobs[0] w.startedJobs = w.startedJobs[1:] jobInfo, err := ppsClient.InspectJob( context.Background(), &pps.InspectJobRequest{ Job: job, BlockState: true, }, ) if err != nil { return err } if jobInfo.State != pps.JobState_JOB_STATE_SUCCESS { return fmt.Errorf("job %s failed", job.Id) } w.jobs = append(w.jobs, job) } else { if len(w.finished) == 0 { return nil } inputs := [5]string{} var jobInputs []*pps.JobInput repoSet := make(map[string]bool) for i := range inputs { commit := w.finished[w.rand.Intn(len(w.finished))] if _, ok := repoSet[commit.Repo.Name]; ok { continue } repoSet[commit.Repo.Name] = true inputs[i] = commit.Repo.Name jobInputs = 
append(jobInputs, &pps.JobInput{Commit: commit}) } var parentJobID string if len(w.jobs) > 0 { parentJobID = w.jobs[w.rand.Intn(len(w.jobs))].Id } outFilename := w.randString(10) job, err := ppsutil.CreateJob( ppsClient, "", []string{"bash"}, w.grepCmd(inputs, outFilename), 1, jobInputs, parentJobID, ) if err != nil { return err } w.startedJobs = append(w.startedJobs, job) } case opt < pipeline: if len(w.repos) == 0 { return nil } inputs := [5]string{} var pipelineInputs []*pps.PipelineInput repoSet := make(map[string]bool) for i := range inputs { repo := w.repos[w.rand.Intn(len(w.repos))] if _, ok := repoSet[repo.Name]; ok { continue } repoSet[repo.Name] = true inputs[i] = repo.Name pipelineInputs = append(pipelineInputs, &pps.PipelineInput{Repo: repo}) } pipelineName := w.randString(10) outFilename := w.randString(10) if err := ppsutil.CreatePipeline( ppsClient, pipelineName, "", []string{"bash"}, w.grepCmd(inputs, outFilename), 1, pipelineInputs, ); err != nil { return err } w.pipelines = append(w.pipelines, ppsutil.NewPipeline(pipelineName)) } return nil }