// StreamAppLog is the same as GetAppLog but returns log lines via an SSE stream.
func (c *Client) StreamAppLog(appID string, opts *logagg.LogOpts, output chan<- *ct.SSELogChunk) (stream.Stream, error) {
	path := fmt.Sprintf("/apps/%s/log", appID)
	if opts != nil {
		if encodedQuery := opts.EncodedQuery(); encodedQuery != "" {
			path = fmt.Sprintf("%s?%s", path, encodedQuery)
		}
	}
	return c.Stream("GET", path, nil, output)
}
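// exampleStreamAppLog is an illustrative sketch, not part of the client API:
// it shows one way a caller might consume StreamAppLog. The app ID, the
// Follow option and the error handling are assumptions for demonstration; the
// "message"/"eof" event names mirror what the controller's AppLog handler
// emits, and the Close/Err methods on stream.Stream are assumed from its use
// elsewhere in the codebase.
func exampleStreamAppLog(client *Client) error {
	output := make(chan *ct.SSELogChunk)
	s, err := client.StreamAppLog("example-app-id", &logagg.LogOpts{Follow: true}, output)
	if err != nil {
		return err
	}
	defer s.Close()
	for chunk := range output {
		if chunk.Event == "eof" {
			break
		}
		// Each "message" chunk carries a raw JSON-encoded log message.
		fmt.Println(string(chunk.Data))
	}
	return s.Err()
}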
// GetAppLog returns a ReadCloser log stream of the app with ID appID. If
// opts.Lines is set to zero or above, the number of lines returned is capped
// at that value; otherwise all available logs are returned. If opts.Follow is
// true, new log lines are streamed after the buffered log.
func (c *Client) GetAppLog(appID string, opts *logagg.LogOpts) (io.ReadCloser, error) {
	path := fmt.Sprintf("/apps/%s/log", appID)
	if opts != nil {
		if encodedQuery := opts.EncodedQuery(); encodedQuery != "" {
			path = fmt.Sprintf("%s?%s", path, encodedQuery)
		}
	}
	res, err := c.RawReq("GET", path, nil, nil, nil)
	if err != nil {
		return nil, err
	}
	return res.Body, nil
}
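// exampleGetAppLog is an illustrative sketch, not part of the client API: it
// requests at most the last 100 buffered lines for a hypothetical app and
// copies the raw response body (a stream of JSON-encoded log messages) to
// stdout. The app ID and line count are assumptions for demonstration.
func exampleGetAppLog(client *Client) error {
	lines := 100
	rc, err := client.GetAppLog("example-app-id", &logagg.LogOpts{Lines: &lines})
	if err != nil {
		return err
	}
	defer rc.Close()
	_, err = io.Copy(os.Stdout, rc)
	return err
}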
// AppLog serves the app log endpoint: it proxies the app's log from the log
// aggregator, either as plain text or, if the client accepts
// "text/event-stream", as an SSE stream of log chunks.
func (c *controllerAPI) AppLog(ctx context.Context, w http.ResponseWriter, req *http.Request) {
	ctx, cancel := context.WithCancel(ctx)

	opts := logagg.LogOpts{
		Follow: req.FormValue("follow") == "true",
		JobID:  req.FormValue("job_id"),
	}
	if vals, ok := req.Form["process_type"]; ok && len(vals) > 0 {
		opts.ProcessType = &vals[len(vals)-1]
	}
	if streamTypeVals := req.FormValue("stream_types"); streamTypeVals != "" {
		streamTypes := strings.Split(streamTypeVals, ",")
		opts.StreamTypes = make([]logagg.StreamType, len(streamTypes))
		for i, typ := range streamTypes {
			opts.StreamTypes[i] = logagg.StreamType(typ)
		}
	}
	if strLines := req.FormValue("lines"); strLines != "" {
		lines, err := strconv.Atoi(strLines)
		if err != nil {
			respondWithError(w, err)
			return
		}
		opts.Lines = &lines
	}

	rc, err := c.logaggc.GetLog(c.getApp(ctx).ID, &opts)
	if err != nil {
		respondWithError(w, err)
		return
	}
	if cn, ok := w.(http.CloseNotifier); ok {
		ch := cn.CloseNotify()
		go func() {
			select {
			case <-ch:
				rc.Close()
			case <-ctx.Done():
			}
		}()
	}
	defer cancel()
	defer rc.Close()

	if !strings.Contains(req.Header.Get("Accept"), "text/event-stream") {
		w.Header().Set("Content-Type", "text/plain")
		w.WriteHeader(200)
		// Send headers right away if following
		if wf, ok := w.(http.Flusher); ok && opts.Follow {
			wf.Flush()
		}

		fw := httphelper.FlushWriter{Writer: w, Enabled: opts.Follow}
		io.Copy(fw, rc)
		return
	}

	ch := make(chan *ct.SSELogChunk)
	l, _ := ctxhelper.LoggerFromContext(ctx)
	s := sse.NewStream(w, ch, l)
	defer s.Close()
	s.Serve()

	msgc := make(chan *json.RawMessage)
	go func() {
		defer close(msgc)
		dec := json.NewDecoder(rc)
		for {
			var m json.RawMessage
			if err := dec.Decode(&m); err != nil {
				if err != io.EOF {
					l.Error("decoding logagg stream", err)
				}
				return
			}
			msgc <- &m
		}
	}()

	for {
		select {
		case m := <-msgc:
			if m == nil {
				ch <- &ct.SSELogChunk{Event: "eof"}
				return
			}
			// write to sse
			select {
			case ch <- &ct.SSELogChunk{Event: "message", Data: *m}:
			case <-s.Done:
				return
			case <-ctx.Done():
				return
			}
		case <-s.Done:
			return
		case <-ctx.Done():
			return
		}
	}
}
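// exampleAppLogRequest is an illustrative sketch of a raw request to the
// endpoint served by AppLog. The host, app ID and parameter values are
// assumptions; in practice callers go through Client.GetAppLog or
// Client.StreamAppLog, which build the equivalent query from logagg.LogOpts.
func exampleAppLogRequest() (*http.Request, error) {
	q := url.Values{}
	q.Set("follow", "true")                // stream new lines after the buffered log
	q.Set("job_id", "example-job-id")      // only lines from this job
	q.Set("process_type", "web")           // only lines from this process type
	q.Set("stream_types", "stdout,stderr") // comma-separated stream types
	q.Set("lines", "100")                  // cap on buffered lines returned
	req, err := http.NewRequest("GET", "http://controller.example/apps/example-app-id/log?"+q.Encode(), nil)
	if err != nil {
		return nil, err
	}
	// Ask for SSE framing; without this header the handler writes plain text.
	req.Header.Set("Accept", "text/event-stream")
	return req, nil
}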
// TestAPIGetLogBuffer verifies that GetLog filters the buffered log by line
// count, job ID and process type.
func (s *LogAggregatorTestSuite) TestAPIGetLogBuffer(c *C) {
	appID := "test-app"
	msg1 := newMessageForApp(appID, "web.1", "log message 1")
	msg2 := newMessageForApp(appID, "web.2", "log message 2")
	msg3 := newMessageForApp(appID, "worker.3", "log message 3")
	msg4 := newMessageForApp(appID, "web.1", "log message 4")
	msg5 := newMessageForApp(appID, ".5", "log message 5")
	s.agg.feed(msg1)
	s.agg.feed(msg2)
	s.agg.feed(msg3)
	s.agg.feed(msg4)
	s.agg.feed(msg5)

	runtest := func(opts logagg.LogOpts, expected string) {
		numLines := -1
		if opts.Lines != nil {
			numLines = *opts.Lines
		}
		processType := "<nil>"
		if opts.ProcessType != nil {
			processType = *opts.ProcessType
		}
		c.Logf("Follow=%t Lines=%d JobID=%q ProcessType=%q", opts.Follow, numLines, opts.JobID, processType)

		logrc, err := s.client.GetLog(appID, &opts)
		c.Assert(err, IsNil)
		defer logrc.Close()

		assertAllLogsEquals(c, logrc, expected)
	}

	tests := []struct {
		numLogs     *int
		jobID       string
		processType *string
		expected    []*rfc5424.Message
	}{
		{
			numLogs:  typeconv.IntPtr(-1),
			expected: []*rfc5424.Message{msg1, msg2, msg3, msg4, msg5},
		},
		{
			numLogs:  typeconv.IntPtr(1),
			expected: []*rfc5424.Message{msg5},
		},
		{
			numLogs:  typeconv.IntPtr(1),
			jobID:    "3",
			expected: []*rfc5424.Message{msg3},
		},
		{
			numLogs:  typeconv.IntPtr(-1),
			jobID:    "1",
			expected: []*rfc5424.Message{msg1, msg4},
		},
		{
			numLogs:     typeconv.IntPtr(-1),
			processType: typeconv.StringPtr("web"),
			expected:    []*rfc5424.Message{msg1, msg2, msg4},
		},
		{
			numLogs:     typeconv.IntPtr(-1),
			processType: typeconv.StringPtr(""),
			expected:    []*rfc5424.Message{msg5},
		},
	}
	for _, test := range tests {
		opts := logagg.LogOpts{
			Follow: false,
			JobID:  test.jobID,
		}
		if test.processType != nil {
			opts.ProcessType = test.processType
		}
		if test.numLogs != nil {
			opts.Lines = test.numLogs
		}
		expected := ""
		for _, msg := range test.expected {
			expected += marshalMessage(msg)
		}
		runtest(opts, expected)
	}
}
// runLog implements the log command: it fetches the app's log via GetAppLog
// and writes each decoded message to stdout, stderr or the init output writer
// depending on the message's stream type and the command flags.
func runLog(args *docopt.Args, client controller.Client) error {
	rawOutput := args.Bool["--raw-output"]
	opts := logagg.LogOpts{
		Follow: args.Bool["--follow"],
		JobID:  args.String["--job"],
		StreamTypes: []logagg.StreamType{
			logagg.StreamTypeStdout,
			logagg.StreamTypeStderr,
		},
	}
	if ptype, ok := args.String["--process-type"]; ok {
		opts.ProcessType = &ptype
	}
	if strlines := args.String["--number"]; strlines != "" {
		lines, err := strconv.Atoi(strlines)
		if err != nil {
			return err
		}
		opts.Lines = &lines
	}
	if args.Bool["--init"] {
		opts.StreamTypes = append(opts.StreamTypes, logagg.StreamTypeInit)
	}
	rc, err := client.GetAppLog(mustApp(), &opts)
	if err != nil {
		return err
	}
	defer rc.Close()

	// By default stderr lines are written to stdout alongside stdout lines;
	// --split-stderr sends them to the process's own stderr instead.
	var stderr io.Writer = os.Stdout
	if args.Bool["--split-stderr"] {
		stderr = os.Stderr
	}

	var initOut io.Writer = ioutil.Discard
	if args.Bool["--init"] {
		initOut = os.Stderr
	}

	dec := json.NewDecoder(rc)
	for {
		var msg logaggc.Message
		err := dec.Decode(&msg)
		if err == io.EOF {
			return nil
		} else if err != nil {
			return err
		}

		var stream io.Writer
		switch msg.Stream {
		case logagg.StreamTypeStdout:
			stream = os.Stdout
		case logagg.StreamTypeStderr:
			stream = stderr
		case logagg.StreamTypeInit:
			stream = initOut
		default:
			continue
		}
		if rawOutput {
			fmt.Fprintln(stream, msg.Msg)
		} else {
			tstamp := msg.Timestamp.Format(rfc3339micro)
			fmt.Fprintf(stream, "%s %s[%s.%s]: %s\n",
				tstamp,
				msg.Source,
				msg.ProcessType,
				msg.JobID,
				msg.Msg,
			)
		}
	}
}
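// Flags read by runLog above. The full docopt usage string is defined with the
// command registration elsewhere; this summary only reflects what the code
// reads and how each value is used:
//
//	--follow        stream new log lines after the buffered log
//	--job           only show lines from the given job ID
//	--process-type  only show lines from the given process type
//	--number        cap the number of buffered lines returned
//	--init          also request and print the init stream
//	--raw-output    print the raw message text without the timestamp/source prefix
//	--split-stderr  write stderr lines to stderr instead of stdout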