func (lmw Logger) Handler(ph http.Handler, c context.Context) http.Handler { handler := func(w http.ResponseWriter, r *http.Request) { rec := util.NewRecorderHijacker(w) uri := r.URL.RequestURI() remoteAddr := webfw.RemoteAddr(r) remoteUser := "" method := r.Method referer := r.Header.Get("Referer") userAgent := r.Header.Get("User-Agent") ph.ServeHTTP(rec, r) for k, v := range rec.Header() { w.Header()[k] = v } w.WriteHeader(rec.GetCode()) w.Write(rec.GetBody().Bytes()) timestamp := time.Now().Format(dateFormat) code := rec.GetCode() length := rec.GetBody().Len() lmw.AccessLogger.Print(fmt.Sprintf("%s - %s [%s] \"%s %s\" %d %d \"%s\" %s", remoteAddr, remoteUser, timestamp, method, uri, code, length, referer, userAgent)) } return http.HandlerFunc(handler) }
func (gmw Gzip) Handler(ph http.Handler, c context.Context) http.Handler { handler := func(w http.ResponseWriter, r *http.Request) { rec := util.NewRecorderHijacker(w) useGzip := strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") ph.ServeHTTP(rec, r) for k, v := range rec.Header() { w.Header()[k] = v } w.Header().Set("Vary", "Accept-Encoding") if useGzip { w.Header().Set("Content-Encoding", "gzip") if w.Header().Get("Content-Type") == "" { w.Header().Set("Content-Type", http.DetectContentType(rec.GetBody().Bytes())) } } if useGzip { buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) gz := gzip.NewWriter(buf) if _, err := gz.Write(rec.GetBody().Bytes()); err != nil { panic(err) } gz.Close() w.Header().Set("Content-Length", strconv.Itoa(buf.Len())) w.WriteHeader(rec.GetCode()) buf.WriteTo(w) } else { w.WriteHeader(rec.GetCode()) w.Write(rec.GetBody().Bytes()) } } return http.HandlerFunc(handler) }
// Handler wraps ph with cookie-session support: it resolves the on-disk
// session store path, optionally starts a periodic cleanup goroutine, and on
// each request loads (or creates) a session, exposes it via the request
// context, and writes it back after the wrapped handler runs.
func (smw Session) Handler(ph http.Handler, c context.Context) http.Handler {
	var abspath string
	var maxAge, cleanupInterval, cleanupMaxAge time.Duration

	// Relative store paths are resolved against the executable's directory,
	// not the process working directory.
	if filepath.IsAbs(smw.Path) {
		abspath = smw.Path
	} else {
		var err error
		abspath, err = filepath.Abs(path.Join(filepath.Dir(os.Args[0]), smw.Path))
		if err != nil {
			panic(err)
		}
	}

	// Durations arrive as config strings ("30m", "24h", ...); a malformed
	// value is a configuration bug, hence panic at setup time.
	if smw.MaxAge != "" {
		var err error
		maxAge, err = time.ParseDuration(smw.MaxAge)
		if err != nil {
			panic(err)
		}
	}

	logger := webfw.GetLogger(c)

	if smw.CleanupInterval != "" {
		var err error
		cleanupInterval, err = time.ParseDuration(smw.CleanupInterval)
		if err != nil {
			panic(err)
		}
		cleanupMaxAge, err = time.ParseDuration(smw.CleanupMaxAge)
		if err != nil {
			panic(err)
		}
		// Background sweeper; runs for the life of the process.
		// NOTE(review): time.Tick's ticker is never stopped — acceptable here
		// since the goroutine is intentionally process-lifetime.
		go func() {
			for _ = range time.Tick(cleanupInterval) {
				logger.Print("Cleaning up old sessions")

				if err := context.CleanupSessions(abspath, cleanupMaxAge); err != nil {
					logger.Printf("Failed to clean up sessions: %v", err)
				}
			}
		}()
	}

	handler := func(w http.ResponseWriter, r *http.Request) {
		// Path component of the request, without the query string.
		uriParts := strings.SplitN(r.RequestURI, "?", 2)

		if uriParts[0] == "" {
			uriParts[0] = r.URL.Path
		}

		// Skip session handling entirely for configured URL prefixes
		// (matched either as an exact path or as a "prefix/" subtree).
		ignore := false
		for _, prefix := range smw.IgnoreURLPrefix {
			if prefix[0] == '/' {
				prefix = prefix[1:]
			}

			if strings.HasPrefix(uriParts[0], smw.Pattern+prefix+"/") {
				ignore = true
				break
			}

			if uriParts[0] == smw.Pattern+prefix {
				ignore = true
				break
			}
		}

		if ignore {
			ph.ServeHTTP(w, r)
			return
		}

		firstTimer := false

		var sess context.Session
		if smw.SessionGenerator == nil {
			sess = context.NewSession(smw.Secret, smw.Cipher, abspath)
		} else {
			sess = smw.SessionGenerator(smw.Secret, smw.Cipher, abspath)
		}
		sess.SetMaxAge(maxAge)

		err := sess.Read(r, c)
		// Expired / not-yet-stored sessions keep their name; any other read
		// failure means this client has no usable session, so mint a fresh
		// one and flag the request as a first visit. A missing cookie is the
		// normal first-visit case and is not worth logging.
		if err != nil && err != context.ErrExpired && err != context.ErrNotExist {
			sess.SetName(util.UUID())
			firstTimer = true

			if err != context.ErrCookieNotExist {
				logger.Printf("Error reading session: %v", err)
			}
		}

		// Expose the session and first-visit flag to downstream handlers.
		c.Set(r, context.BaseCtxKey("session"), sess)
		c.Set(r, context.BaseCtxKey("firstTimer"), firstTimer)

		// Record the response so the session cookie can still be written
		// after the wrapped handler has "sent" headers.
		rec := util.NewRecorderHijacker(w)

		ph.ServeHTTP(rec, r)

		for k, v := range rec.Header() {
			w.Header()[k] = v
		}

		if sess != nil {
			if err := sess.Write(w); err != nil {
				logger.Printf("Unable to write session: %v", err)
			}
		}

		w.WriteHeader(rec.GetCode())
		w.Write(rec.GetBody().Bytes())
	}

	return http.HandlerFunc(handler)
}
func (mw Static) Handler(ph http.Handler, c context.Context) http.Handler { var expires time.Duration if mw.Expires != "" { var err error expires, err = time.ParseDuration(mw.Expires) if err != nil { panic(err) } } handler := func(w http.ResponseWriter, r *http.Request) { rec := util.NewRecorderHijacker(w) ph.ServeHTTP(rec, r) if rec.GetCode() != http.StatusNotFound { copyRecorder(rec, w) return } uriParts := strings.SplitN(r.RequestURI, "?", 2) if uriParts[0] == "" { uriParts[0] = r.URL.Path } for { if r.Method != "GET" && r.Method != "HEAD" { break } rpath := uriParts[0] if mw.Prefix != "" { if !strings.HasPrefix(rpath, mw.Prefix) { break } rpath = rpath[len(mw.Prefix):] if rpath != "" && rpath[0] != '/' { break } } file, err := fs.DefaultFS.OpenRoot(mw.Path, rpath) if err != nil { break } defer file.Close() stat, err := file.Stat() if err != nil { break } if stat.IsDir() { if !strings.HasSuffix(uriParts[0], "/") { http.Redirect(w, r, uriParts[0]+"/", http.StatusFound) return } index := "index.html" if mw.Index != "" { index = mw.Index } ipath := path.Join(rpath, index) file, err = fs.DefaultFS.OpenRoot(mw.Path, ipath) if err == nil { defer file.Close() stat, err = file.Stat() } if err != nil || stat.IsDir() { if mw.FileList { file, err := fs.DefaultFS.OpenRoot(mw.Path, rpath) if err != nil { break } stats, err := file.Readdir(1000) if err != nil { break } sort.Sort(FileStats(stats)) fileList := &fileList{CurDir: path.Base(rpath), Stats: stats} buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) if err := staticTmpl.Execute(buf, fileList); err != nil { break } if _, err := buf.WriteTo(w); err != nil { break } return } else { break } } else { rpath = ipath } } etag := generateEtag(rpath, stat) w.Header().Set("ETag", etag) if expires != 0 { w.Header().Set("Cache-Control", fmt.Sprintf("max-age=%.0f", expires.Seconds())) w.Header().Set("Expires", time.Now().Add(expires).Format(http.TimeFormat)) } http.ServeContent(w, r, rpath, stat.ModTime(), file) 
return } copyRecorder(rec, w) } return http.HandlerFunc(handler) }