func (c *RemoteImportBuildCmd) Execute(args []string) error {
	cl := NewAPIClientWithAuthIfPresent()

	if GlobalOpt.Verbose {
		log.Printf("Creating a new import-only build for repo %q commit %q", remoteCmd.RepoURI, c.CommitID)
	}

	repo, _, err := cl.Repos.Get(sourcegraph.RepoSpec{URI: remoteCmd.RepoURI}, nil)
	if err != nil {
		return err
	}

	repoSpec := sourcegraph.RepoSpec{URI: remoteCmd.RepoURI}
	repoRevSpec := sourcegraph.RepoRevSpec{RepoSpec: repoSpec, Rev: c.CommitID}

	// Resolve to the full commit ID, and ensure that the remote
	// server knows about the commit.
	commit, err := getCommitWithRefreshAndRetry(cl, repoRevSpec)
	if err != nil {
		return err
	}
	repoRevSpec.CommitID = string(commit.ID)
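	// Create an import-only build. Queue is false because this command
	// drives the build itself, and Force requests creation even if a
	// build for this commit already exists.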

	build, _, err := cl.Builds.Create(repoRevSpec, &sourcegraph.BuildCreateOptions{
		BuildConfig: sourcegraph.BuildConfig{
			Import: true,
			Queue:  false,
		},
		Force: true,
	})
	if err != nil {
		return err
	}
	if GlobalOpt.Verbose {
		log.Printf("Created build #%d", build.BID)
	}

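	// Mark the build as started, recording the local host/user that is
	// driving it.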
	now := time.Now()
	host := fmt.Sprintf("local (USER=%s)", os.Getenv("USER"))
	buildUpdate := sourcegraph.BuildUpdate{StartedAt: &now, Host: &host}
	if _, _, err := cl.Builds.Update(build.Spec(), buildUpdate); err != nil {
		return err
	}

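	// Create the import task; Queue marks it for the remote worker to
	// pick up and run.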
	importTask := &sourcegraph.BuildTask{
		BID:   build.BID,
		Op:    sourcegraph.ImportTaskOp,
		Queue: true,
	}
	tasks, _, err := cl.Builds.CreateTasks(build.Spec(), []*sourcegraph.BuildTask{importTask})
	if err != nil {
		return err
	}
	importTask = tasks[0]
	if GlobalOpt.Verbose {
		log.Printf("Created import task #%d", importTask.TaskID)
	}

	// Stream logs.
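	// A background goroutine polls the task log, waiting longer between
	// polls for each consecutive empty response and resetting the delay
	// once new entries arrive.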
	done := make(chan struct{})
	go func() {
		var logOpt sourcegraph.BuildGetLogOptions
		loopsSinceLastLog := 0
		for {
			select {
			case <-done:
				return
			case <-time.After(time.Duration(loopsSinceLastLog+1) * 500 * time.Millisecond):
				logs, _, err := cl.Builds.GetTaskLog(importTask.Spec(), &logOpt)
				if err != nil {
					log.Printf("Warning: failed to get build logs: %s.", err)
					return
				}
				if len(logs.Entries) == 0 {
					loopsSinceLastLog++
					continue
				}
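				// Advance the log cursor so the next poll only returns new entries.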
				logOpt.MinID = logs.MaxID
				for _, e := range logs.Entries {
					fmt.Println(e)
				}
				loopsSinceLastLog = 0
			}
		}
	}()

	// Stop the log-streaming goroutine when we return. Closing the
	// channel (rather than sending on it) avoids blocking forever if the
	// goroutine has already exited after a log-fetch error.
	defer close(done)
	taskID := importTask.TaskID
	started := false
	log.Printf("# Import queued. Waiting for task #%d in build #%d to start...", importTask.TaskID, build.BID)
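	// Poll the build's task list until the import task ends, sleeping a
	// little longer on each iteration and giving up after 45 minutes.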
	for i, start := 0, time.Now(); ; i++ {
		if time.Since(start) > 45*time.Minute {
			return fmt.Errorf("import timed out after %s", time.Since(start))
		}

		tasks, _, err := cl.Builds.ListBuildTasks(build.Spec(), nil)
		if err != nil {
			return err
		}
		importTask = nil
		for _, task := range tasks {
			if task.TaskID == taskID {
				importTask = task
				break
			}
		}
		if importTask == nil {
			return fmt.Errorf("task #%d not found in task list for build #%d", taskID, build.BID)
		}

		if !started && importTask.StartedAt.Valid {
			log.Printf("# Import started.")
			started = true
		}

		if importTask.EndedAt.Valid {
			if importTask.Success {
				log.Printf("# Import succeeded!")
			} else if importTask.Failure {
				log.Printf("# Import failed!")
				return fmt.Errorf("import failed")
			}
			break
		}

		time.Sleep(time.Duration(i) * 200 * time.Millisecond)
	}

	log.Printf("# View the repository at:")
	log.Printf("# %s://%s/%s@%s", cl.BaseURL.Scheme, cl.BaseURL.Host, repo.URI, repoRevSpec.Rev)

	return nil
}
Example #2
func (c *BuildDataListCmd) Execute(args []string) error {
	if c.URLs && c.Local {
		return fmt.Errorf("using --urls is incompatible with the build-data -l/--local option because local build data files do not have a URL")
	}
	if c.URLs {
		c.Long = true
	}
	dir := c.Args.Dir
	if dir == "" {
		dir = "."
	}

	bdfs, repoLabel, err := c.getFileSystem()
	if err != nil {
		return err
	}

	if GlobalOpt.Verbose {
		log.Printf("Listing build files for %s in dir %q", repoLabel, dir)
	}

	// Only used for constructing the URLs for remote build data.
	var repoRevSpec sourcegraph.RepoRevSpec
	if !c.Local {
		cl := NewAPIClientWithAuthIfPresent()
		rrepo, err := getRemoteRepo(cl)
		if err != nil {
			return err
		}
		repoRevSpec.RepoSpec = rrepo.RepoSpec()

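		// Use the local working copy's commit so the URLs point at the
		// exact revision whose build data is being listed.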
		lrepo, err := openLocalRepo()
		if err != nil {
			return err
		}
		repoRevSpec.Rev = lrepo.CommitID
		repoRevSpec.CommitID = lrepo.CommitID
	}

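	// printFile prints a single entry: it applies the type filter
	// ("f" for regular files, "d" for directories), appends "/" to
	// directory names, and optionally includes the size, modification
	// time, and remote build-data URL.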
	printFile := func(fi os.FileInfo) {
		if c.Type == "f" && !fi.Mode().IsRegular() {
			return
		}
		if c.Type == "d" && !fi.Mode().IsDir() {
			return
		}

		var suffix string
		if fi.IsDir() {
			suffix = "/"
		}

		var urlStr string
		if c.URLs {
			spec := sourcegraph.BuildDataFileSpec{RepoRev: repoRevSpec, Path: filepath.Join(dir, fi.Name())}

			// TODO(sqs): use sourcegraph.Router when it is merged to go-sourcegraph master
			u, err := router.NewAPIRouter(nil).Get(router.RepoBuildDataEntry).URLPath(router.MapToArray(spec.RouteVars())...)
			if err != nil {
				log.Fatal(err)
			}

			// Strip leading "/" so that the URL is relative to the
			// endpoint URL even if the endpoint URL contains a path.
			urlStr = getEndpointURL().ResolveReference(&url.URL{Path: u.Path[1:]}).String()
		}

		if c.Long {
			var timeStr string
			if !fi.ModTime().IsZero() {
				timeStr = fi.ModTime().Format("Jan _2 15:04")
			}
			fmt.Printf("% 7d %12s %s%s %s\n", fi.Size(), timeStr, fi.Name(), suffix, urlStr)
		} else {
			fmt.Println(fi.Name() + suffix)
		}
	}

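	// Either walk the build-data tree recursively or list just the one
	// directory.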
	var fis []os.FileInfo
	if c.Recursive {
		w := fs.WalkFS(dir, rwvfs.Walkable(bdfs))
		for w.Step() {
			if err := w.Err(); err != nil {
				return err
			}
			printFile(treeFileInfo{w.Path(), w.Stat()})
		}
	} else {
		fis, err = bdfs.ReadDir(dir)
		if err != nil {
			return err
		}
		for _, fi := range fis {
			printFile(fi)
		}
	}

	return nil
}