Example #1
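// Runner wires together the ATC's components (database, worker pool, build
// engine, HTTP/API/OAuth handlers, and background runners) and returns a
// single ifrit.Runner that runs them as one parallel group.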
func (cmd *ATCCommand) Runner(args []string) (ifrit.Runner, error) {
	err := cmd.validate()
	if err != nil {
		return nil, err
	}

	logger, reconfigurableSink := cmd.constructLogger()

	cmd.configureMetrics(logger)

	sqlDB, pipelineDBFactory, err := cmd.constructDB(logger)
	if err != nil {
		return nil, err
	}

	trackerFactory := resource.TrackerFactory{}
	workerClient := cmd.constructWorkerPool(logger, sqlDB, trackerFactory)

	tracker := resource.NewTracker(workerClient)
	engine := cmd.constructEngine(sqlDB, workerClient, tracker)

	radarSchedulerFactory := pipelines.NewRadarSchedulerFactory(
		tracker,
		cmd.ResourceCheckingInterval,
		engine,
		sqlDB,
	)

	radarScannerFactory := radar.NewScannerFactory(
		tracker,
		cmd.ResourceCheckingInterval,
		cmd.ExternalURL.String(),
	)

	signingKey, err := cmd.loadOrGenerateSigningKey()
	if err != nil {
		return nil, err
	}

	err = sqlDB.CreateDefaultTeamIfNotExists()
	if err != nil {
		return nil, err
	}

	authValidator := cmd.constructValidator(signingKey, sqlDB)

	err = cmd.updateBasicAuthCredentials(sqlDB)
	if err != nil {
		return nil, err
	}

	jwtReader := auth.JWTReader{
		PublicKey: &signingKey.PublicKey,
	}

	err = cmd.configureOAuthProviders(logger, sqlDB)
	if err != nil {
		return nil, err
	}

	providerFactory := provider.NewOAuthFactory(
		sqlDB,
		cmd.oauthBaseURL(),
		auth.OAuthRoutes,
		auth.OAuthCallback,
	)

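	// drain is shared between the API handler and the "drainer" member of the
	// run group below, so in-flight work can wind down cleanly on shutdown.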
	drain := make(chan struct{})

	apiHandler, err := cmd.constructAPIHandler(
		logger,
		reconfigurableSink,
		sqlDB,
		authValidator,
		jwtReader,
		providerFactory,
		signingKey,
		pipelineDBFactory,
		engine,
		workerClient,
		drain,
		radarSchedulerFactory,
		radarScannerFactory,
	)
	if err != nil {
		return nil, err
	}

	oauthHandler, err := auth.NewOAuthHandler(
		logger,
		providerFactory,
		signingKey,
		sqlDB,
	)
	if err != nil {
		return nil, err
	}

	webHandler, err := cmd.constructWebHandler(
		logger,
		authValidator,
		jwtReader,
		pipelineDBFactory,
	)
	if err != nil {
		return nil, err
	}

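	// Each member is a named ifrit.Runner; grouper runs them all in parallel:
	// the public web/API server, the debug server, the pipeline syncer, the
	// build tracker, and the lost-and-found baggage collector.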
	members := []grouper.Member{
		{"drainer", drainer(drain)},

		{"web", http_server.New(
			cmd.bindAddr(),
			cmd.constructHTTPHandler(
				webHandler,
				apiHandler,
				oauthHandler,
			),
		)},

		{"debug", http_server.New(
			cmd.debugBindAddr(),
			http.DefaultServeMux,
		)},

		{"pipelines", pipelines.SyncRunner{
			Syncer: cmd.constructPipelineSyncer(
				logger.Session("syncer"),
				sqlDB,
				pipelineDBFactory,
				radarSchedulerFactory,
			),
			Interval: 10 * time.Second,
			Clock:    clock.NewClock(),
		}},

		{"builds", builds.TrackerRunner{
			Tracker: builds.NewTracker(
				logger.Session("build-tracker"),
				sqlDB,
				engine,
			),
			Interval: 10 * time.Second,
			Clock:    clock.NewClock(),
		}},

		{"lostandfound", lostandfound.NewRunner(
			logger.Session("lost-and-found"),
			lostandfound.NewBaggageCollector(
				logger.Session("baggage-collector"),
				workerClient,
				sqlDB,
				pipelineDBFactory,
				cmd.OldResourceGracePeriod,
				24*time.Hour,
			),
			sqlDB,
			clock.NewClock(),
			cmd.ResourceCacheCleanupInterval,
		)},
	}

	members = cmd.appendStaticWorker(logger, sqlDB, members)

	return onReady(grouper.NewParallel(os.Interrupt, members), func() {
		logger.Info("listening", lager.Data{
			"web":   cmd.bindAddr(),
			"debug": cmd.debugBindAddr(),
		})
	}), nil
}
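
Example #1 only constructs the runner; the caller is responsible for running it. Below is a minimal sketch of how such a runner might be driven, mirroring the sigmon/ifrit pattern that Example #2 uses inline. The run helper and its arguments are illustrative assumptions (it is presumed to live alongside ATCCommand, with github.com/tedsuo/ifrit and github.com/tedsuo/ifrit/sigmon imported), not code from the project.

// run is a hypothetical helper: atcCmd and args are assumed to come from
// flag parsing elsewhere.
func run(atcCmd *ATCCommand, args []string) error {
	runner, err := atcCmd.Runner(args)
	if err != nil {
		return err
	}

	// sigmon.New forwards OS signals (SIGINT, SIGTERM, ...) to the wrapped
	// runner; ifrit.Invoke starts it and returns once it reports ready.
	process := ifrit.Invoke(sigmon.New(runner))

	// Wait blocks until the parallel member group exits.
	return <-process.Wait()
}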
Example #2
File: command.go Project: ACPK/atc
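// Run builds the same component graph as Example #1, but drives it to
// completion itself: it invokes the member group under signal monitoring,
// forwards incoming signals, and returns once the group exits.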
func (cmd *ATCCommand) Run(signals <-chan os.Signal, ready chan<- struct{}) error {
	err := cmd.validate()
	if err != nil {
		return err
	}

	logger, reconfigurableSink := cmd.constructLogger()

	cmd.configureMetrics(logger)

	sqlDB, pipelineDBFactory, err := cmd.constructDB(logger)
	if err != nil {
		return err
	}

	workerClient := cmd.constructWorkerPool(logger, sqlDB)

	tracker := resource.NewTracker(workerClient, sqlDB)

	engine := cmd.constructEngine(sqlDB, workerClient, tracker)

	radarSchedulerFactory := pipelines.NewRadarSchedulerFactory(
		tracker,
		cmd.ResourceCheckingInterval,
		engine,
		sqlDB,
	)

	signingKey, err := cmd.loadOrGenerateSigningKey()
	if err != nil {
		return err
	}

	authValidator, basicAuthEnabled := cmd.constructValidator(signingKey)

	oauthProviders, err := cmd.configureOAuthProviders(logger)
	if err != nil {
		return err
	}

	drain := make(chan struct{})

	apiHandler, err := cmd.constructAPIHandler(
		logger,
		reconfigurableSink,
		sqlDB,
		authValidator,
		oauthProviders,
		basicAuthEnabled,
		signingKey,
		pipelineDBFactory,
		engine,
		workerClient,
		drain,
		radarSchedulerFactory,
	)
	if err != nil {
		return err
	}

	oauthHandler, err := auth.NewOAuthHandler(
		logger,
		oauthProviders,
		signingKey,
	)
	if err != nil {
		return err
	}

	webHandler, err := cmd.constructWebHandler(
		logger,
		sqlDB,
		authValidator,
		pipelineDBFactory,
		engine,
	)
	if err != nil {
		return err
	}

	members := []grouper.Member{
		{"drainer", drainer(drain)},

		{"web", http_server.New(
			cmd.bindAddr(),
			cmd.constructHTTPHandler(
				webHandler,
				apiHandler,
				oauthHandler,
			),
		)},

		{"debug", http_server.New(
			cmd.debugBindAddr(),
			http.DefaultServeMux,
		)},

		{"pipelines", pipelines.SyncRunner{
			Syncer: cmd.constructPipelineSyncer(
				logger.Session("syncer"),
				sqlDB,
				pipelineDBFactory,
				radarSchedulerFactory,
			),
			Interval: 10 * time.Second,
			Clock:    clock.NewClock(),
		}},

		{"builds", builds.TrackerRunner{
			Tracker: builds.NewTracker(
				logger.Session("build-tracker"),
				sqlDB,
				engine,
			),
			Interval: 10 * time.Second,
			Clock:    clock.NewClock(),
		}},

		{"lostandfound", lostandfound.NewRunner(
			logger.Session("lost-and-found"),
			lostandfound.NewBaggageCollector(
				logger.Session("baggage-collector"),
				workerClient,
				sqlDB,
				pipelineDBFactory,
			),
			sqlDB,
			clock.NewClock(),
			5*time.Minute,
		)},
	}

	members = cmd.appendStaticWorker(logger, sqlDB, members)

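	// Run all members in parallel under signal monitoring.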
	group := grouper.NewParallel(os.Interrupt, members)

	running := ifrit.Invoke(sigmon.New(group))

	logger.Info("listening", lager.Data{
		"web":   cmd.bindAddr(),
		"debug": cmd.debugBindAddr(),
	})

	close(ready)

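	// Forward any signals received by the command to the running group, and
	// return its exit error once it stops.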
	for {
		select {
		case s := <-signals:
			running.Signal(s)
		case err := <-running.Wait():
			if err != nil {
				logger.Error("exited-with-failure", err)
			}

			return err
		}
	}
}
				"workerB": workerB,
				"workerC": workerC,
			}

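			// Hand back workers by name from the map above whenever the
			// collector asks for one.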
			fakeWorkerClient.GetWorkerStub = func(name string) (worker.Worker, error) {
				return workerMap[name], nil
			}
			baggageCollectorLogger := lagertest.NewTestLogger("test")

			fakeBaggageCollectorDB = new(fakes.FakeBaggageCollectorDB)
			fakePipelineDBFactory = new(dbfakes.FakePipelineDBFactory)

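			// Construct the baggage collector under test against the fake
			// worker client and fake DBs.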
			baggageCollector = lostandfound.NewBaggageCollector(
				baggageCollectorLogger,
				fakeWorkerClient,
				fakeBaggageCollectorDB,
				fakePipelineDBFactory,
				expectedOldVersionTTL,
				expectedOneOffTTL,
			)

			savedPipeline = db.SavedPipeline{
				Pipeline: db.Pipeline{
					Name: "my-special-pipeline",
					Config: atc.Config{
						Groups:    atc.GroupConfigs{},
						Resources: atc.ResourceConfigs{},
						Jobs: atc.JobConfigs{
							{
								Name: "my-precious-job",
							},
						},
			for _, example := range examples {
				fakeWorkerClient = new(wfakes.FakeClient)
				fakeWorker = new(wfakes.FakeWorker)
				fakeBaggageClaimClient = new(bcfakes.FakeClient)
				fakeWorkerClient.GetWorkerReturns(fakeWorker, nil)
				fakeWorker.VolumeManagerReturns(fakeBaggageClaimClient, true)
				baggageCollectorLogger := lagertest.NewTestLogger("test")

				fakeBaggageCollectorDB = new(fakes.FakeBaggageCollectorDB)
				fakePipelineDBFactory = new(dbfakes.FakePipelineDBFactory)

				baggageCollector = lostandfound.NewBaggageCollector(
					baggageCollectorLogger,
					fakeWorkerClient,
					fakeBaggageCollectorDB,
					fakePipelineDBFactory,
					expectedOldResourceGracePeriod,
					expectedOneOffTTL,
				)

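				// Build the saved pipelines and fake pipeline DBs described by
				// this example's pipeline data.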
				var savedPipelines []db.SavedPipeline
				fakePipelineDBs := make(map[string]*dbfakes.FakePipelineDB)

				for name, data := range example.pipelineData {
					config := atc.Config{}

					for _, resourceData := range data {
						config.Resources = append(config.Resources, resourceData.config)
					}

					savedPipelines = append(savedPipelines, db.SavedPipeline{