// GetStream streams the build job logs to the client using Server-Sent Events.
func GetStream(c *gin.Context) {
	repo := session.Repo(c)
	buildn, _ := strconv.Atoi(c.Param("build"))
	jobn, _ := strconv.Atoi(c.Param("number"))

	c.Writer.Header().Set("Content-Type", "text/event-stream")

	build, err := store.GetBuildNumber(c, repo, buildn)
	if err != nil {
		log.Debugln("stream cannot get build number.", err)
		c.AbortWithError(404, err)
		return
	}
	job, err := store.GetJobNumber(c, build, jobn)
	if err != nil {
		log.Debugln("stream cannot get job number.", err)
		c.AbortWithError(404, err)
		return
	}
	rc, err := stream.Reader(c, stream.ToKey(job.ID))
	if err != nil {
		c.AbortWithError(404, err)
		return
	}

	// Close the stream reader when the client disconnects.
	go func() {
		<-c.Writer.CloseNotify()
		rc.Close()
	}()

	// Encode each log line as a Server-Sent Event and flush it to the client.
	var line int
	var scanner = bufio.NewScanner(rc)
	for scanner.Scan() {
		line++
		var err = sse.Encode(c.Writer, sse.Event{
			Id:    strconv.Itoa(line),
			Event: "message",
			Data:  scanner.Text(),
		})
		if err != nil {
			break
		}
		c.Writer.Flush()
	}

	log.Debugf("Closed stream %s#%d", repo.FullName, build.Number)
}
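// The following example is illustrative only and is not part of the original
// source. It is a minimal sketch, assuming a hypothetical stream URL, of how a
// client could consume the Server-Sent Events emitted by GetStream above. It
// relies only on the standard library ("net/http", "bufio", "strings", "fmt").
func exampleConsumeStream(url string) error {
	// url is a placeholder for whatever route the server mounts GetStream on.
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// SSE frames arrive as "field: value" lines separated by blank lines;
	// print only the data payloads as they stream in.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "data:") {
			fmt.Println(strings.TrimSpace(strings.TrimPrefix(line, "data:")))
		}
	}
	return scanner.Err()
}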
// Stream streams the logs to disk or memory for broadcasting to listeners.
// Once the stream is closed it is moved to permanent storage in the database.
func Stream(c *gin.Context) {
	id, err := strconv.ParseInt(c.Param("id"), 10, 64)
	if err != nil {
		c.String(500, "Invalid input. %s", err)
		return
	}

	key := c.Param("id")
	logrus.Infof("Agent %s creating stream %s.", c.ClientIP(), key)

	wc, err := stream.Writer(c, key)
	if err != nil {
		c.String(500, "Failed to create stream writer. %s", err)
		return
	}
	defer func() {
		wc.Close()
		stream.Delete(c, key)
	}()

	// Copy the log output from the agent request body into the stream.
	io.Copy(wc, c.Request.Body)

	rc, err := stream.Reader(c, key)
	if err != nil {
		c.String(500, "Failed to create stream reader. %s", err)
		return
	}

	// Persist the buffered log to the database once the upload completes.
	wg := sync.WaitGroup{}
	wg.Add(1)
	go func() {
		defer recover()
		store.WriteLog(c, &model.Job{ID: id}, rc)
		wg.Done()
	}()

	wc.Close()
	wg.Wait()

	c.String(200, "")
	logrus.Debugf("Agent %s wrote stream to database", c.ClientIP())
}
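// The following example is illustrative only and is not part of the original
// source. It is a minimal sketch, assuming a hypothetical route such as
// "/api/stream/:id" mounted as a POST handler, of how an agent could upload
// its log output to the Stream handler above. It relies only on the standard
// library ("net/http", "fmt", "io").
func exampleUploadLogs(url string, logs io.Reader) error {
	// Stream reads the job id from the ":id" URL parameter and copies the
	// request body into the stream, so the agent simply POSTs the raw log.
	resp, err := http.Post(url, "text/plain", logs)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return nil
}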