// RenderGzip a JSON response using gzip compression. func (j JSON) RenderGzip(ctx *fasthttp.RequestCtx, v interface{}) error { if j.StreamingJSON { return j.renderStreamingJSONGzip(ctx, v) } var result []byte var err error if j.Indent { result, err = json.MarshalIndent(v, "", " ") result = append(result, '\n') } else { result, err = json.Marshal(v) } if err != nil { return err } ctx.Response.Header.Add("Content-Encoding", "gzip") // Unescape HTML if needed. if j.UnEscapeHTML { result = bytes.Replace(result, []byte("\\u003c"), []byte("<"), -1) result = bytes.Replace(result, []byte("\\u003e"), []byte(">"), -1) result = bytes.Replace(result, []byte("\\u0026"), []byte("&"), -1) } w := gzip.NewWriter(ctx.Response.BodyWriter()) // JSON marshaled fine, write out the result. j.Head.Write(ctx) if len(j.Prefix) > 0 { w.Write(j.Prefix) } w.Write(result) w.Close() return nil }
func gzipHandler(fn http.HandlerFunc) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { // if header doesn't indicate gzip is accepted, return as is. if !strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") { fn(w, r) return } w.Header().Set("Content-Encoding", "gzip") switch { case strings.Contains(r.URL.Path, ".css"): w.Header().Set("Content-Type", "text/css") case strings.Contains(r.URL.Path, ".js"): w.Header().Set("Content-Type", "application/javascript") case strings.Contains(r.URL.Path, ".html"): w.Header().Set("Content-Type", "text/html") case strings.Contains(r.URL.Path, ".ttf"): w.Header().Set("Content-Type", "application/x-font-ttf") case strings.Contains(r.URL.Path, ".woff"): w.Header().Set("Content-Type", "application/font-woff") default: w.Header().Set("Content-Type", "text/html") } gz := gzip.NewWriter(w) defer gz.Close() fn(gzipResponseWriter{Writer: gz, ResponseWriter: w}, r) } }
// RenderGzip a JSONP response using gzip compression. func (j JSONP) RenderGzip(ctx *fasthttp.RequestCtx, v interface{}) error { var result []byte var err error if j.Indent { result, err = json.MarshalIndent(v, "", " ") } else { result, err = json.Marshal(v) } if err != nil { return err } w := gzip.NewWriter(ctx.Response.BodyWriter()) ctx.Response.Header.Add("Content-Encoding", "gzip") // JSON marshaled fine, write out the result. j.Head.Write(ctx) w.Write([]byte(j.Callback + "(")) w.Write(result) w.Write([]byte(");")) // If indenting, append a new line. if j.Indent { w.Write([]byte("\n")) } w.Close() return nil }
func (j JSON) renderStreamingJSONGzip(ctx *fasthttp.RequestCtx, v interface{}) error { ctx.Response.Header.Add("Content-Encoding", "gzip") j.Head.Write(ctx) w := gzip.NewWriter(ctx.Response.BodyWriter()) if len(j.Prefix) > 0 { w.Write(j.Prefix) } w.Close() return json.NewEncoder(w).Encode(v) }
// TarGz creates a .tar.gz file at targzPath containing // the contents of files listed in filePaths. func TarGz(targzPath string, filePaths []string) error { out, err := os.Create(targzPath) if err != nil { return err } defer out.Close() gzWriter := gzip.NewWriter(out) defer gzWriter.Close() tarWriter := tar.NewWriter(gzWriter) defer tarWriter.Close() for _, fpath := range filePaths { infile, err := os.Open(fpath) if err != nil { return err } infileInfo, err := infile.Stat() if err != nil { infile.Close() return err } fileHeader, err := tar.FileInfoHeader(infileInfo, fpath) if err != nil { infile.Close() return err } err = tarWriter.WriteHeader(fileHeader) if err != nil { infile.Close() return err } _, err = io.Copy(tarWriter, infile) if err != nil { infile.Close() return err } infile.Close() } return nil }
func archive(outpath string, r io.Reader, extra []byte) (int64, error) { br := bufio.NewReader(r) err := os.MkdirAll(filepath.Dir(outpath), 0777) if err != nil { return 0, err } outfile, err := os.Create(outpath) if err != nil { return 0, err } cw := &countWriter{ w: outfile, } bufout := bufio.NewWriter(cw) zipWriter := gzip.NewWriter(bufout) zipWriter.Header.ModTime = time.Time{} zipWriter.Header.OS = 0 if len(extra) > 0 { zipWriter.Header.Extra = extra } _, err = io.Copy(zipWriter, br) if err != nil { return 0, err } err = zipWriter.Close() if err != nil { return 0, err } bufout.Flush() err = outfile.Close() if err != nil { return 0, err } return cw.count, nil }
// Wopen opens a buffered reader. // If f == "-", then stdout will be used. // If f endswith ".gz", then the output will be gzipped. func Wopen(f string) (*Writer, error) { var wtr *os.File var err error if f == "-" { wtr = os.Stdout } else { wtr, err = os.Create(f) if err != nil { return nil, err } } size := getSize() if !strings.HasSuffix(f, ".gz") { return &Writer{bufio.NewWriterSize(wtr, size), wtr, nil}, nil } gz := gzip.NewWriter(wtr) return &Writer{bufio.NewWriterSize(gz, size), wtr, gz}, nil }
// RenderGzip an XML response using gzip compression. func (x XML) RenderGzip(ctx *fasthttp.RequestCtx, v interface{}) error { var result []byte var err error if x.Indent { result, err = xml.MarshalIndent(v, "", " ") result = append(result, '\n') } else { result, err = xml.Marshal(v) } if err != nil { return err } ctx.Response.Header.Add("Content-Encoding", "gzip") // XML marshaled fine, write out the result. x.Head.Write(ctx) w := gzip.NewWriter(ctx.Response.BodyWriter()) if len(x.Prefix) > 0 { w.Write(x.Prefix) } w.Write(result) w.Close() return nil }
// ServeHTTP is the proxy's main entry point. It authenticates the client,
// evaluates request-phase ACLs (which may block, require auth, or SSL-bump),
// forwards the request upstream, then evaluates response-phase ACLs —
// optionally phrase-scanning or image-hashing the body — before relaying
// the (possibly modified, possibly gzipped) content to the client.
func (h ProxyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	activeConnections.Add(1)
	defer activeConnections.Done()

	conf := GetConfig()

	if !conf.ACLsLoaded {
		http.Error(w, "Redwood proxy configuration needs to be updated for this version of Redwood.\n(Use ACLs)", 500)
		return
	}

	// Reject absurdly long URLs before doing any further work.
	if len(r.URL.String()) > 10000 {
		http.Error(w, "URL too long", http.StatusRequestURITooLong)
		return
	}

	// Use the bare IP (without port) as the client key where possible.
	client := r.RemoteAddr
	host, _, err := net.SplitHostPort(client)
	if err == nil {
		client = host
	}

	if conf.AuthCacheTime > 0 {
		auth := r.Header.Get("Proxy-Authorization")
		if auth == "" {
			// No credentials on this request: reuse cached credentials for
			// this client IP if they are still fresh.
			authCacheLock.RLock()
			ar, ok := authCache[client]
			authCacheLock.RUnlock()
			// NOTE(review): time.Since(ar.Time) would be the idiomatic form.
			if ok && time.Now().Sub(ar.Time) < time.Duration(conf.AuthCacheTime)*time.Second {
				r.Header.Set("Proxy-Authorization", ar.ProxyAuthorization)
			}
		} else {
			// Remember these credentials for subsequent requests from the
			// same client IP.
			authCacheLock.Lock()
			authCache[client] = authRecord{
				ProxyAuthorization: auth,
				Time:               time.Now(),
			}
			authCacheLock.Unlock()
		}
	}

	if r.Header.Get("Proxy-Authorization") != "" {
		user, pass := ProxyCredentials(r)
		if !conf.ValidCredentials(user, pass) {
			log.Printf("Incorrect username or password from %v: %q:%q", r.RemoteAddr, user, pass)
			// Drop bad credentials so the request proceeds unauthenticated.
			r.Header.Del("Proxy-Authorization")
		}
	}

	// Reconstruct the URL if it is incomplete (i.e. on a transparent proxy).
	if r.URL.Host == "" {
		r.URL.Host = r.Host
	}
	if r.URL.Scheme == "" {
		if h.TLS {
			r.URL.Scheme = "https"
		} else {
			r.URL.Scheme = "http"
		}
	}

	var userAgent string
	if conf.LogUserAgent {
		userAgent = r.Header.Get("User-Agent")
	}

	// Map virtual host names to their real destinations.
	if realHost, ok := conf.VirtualHosts[r.Host]; ok {
		r.Host = realHost
		r.URL.Host = realHost
	}

	// "user" is used for logging/ACLs: authenticated name if available,
	// otherwise the client IP.
	user := client
	var authUser string
	if h.user != "" {
		authUser = h.user
	} else if u, _ := ProxyCredentials(r); u != "" {
		authUser = u
	}
	if authUser != "" {
		user = authUser
	}

	// Request-phase content scoring.
	tally := conf.URLRules.MatchingRules(r.URL)
	scores := conf.categoryScores(tally)
	categories := conf.significantCategories(scores)

	reqACLs := conf.ACLs.requestACLs(r, authUser)

	possibleActions := []string{
		"allow",
		"block",
		"block-invisible",
	}
	// Only offer require-auth when credentials are absent and this is not
	// already a TLS-bumped connection.
	if r.Header.Get("Proxy-Authorization") == "" && !h.TLS {
		possibleActions = append(possibleActions, "require-auth")
	}
	if r.Method == "CONNECT" && conf.TLSReady {
		possibleActions = append(possibleActions, "ssl-bump")
	}

	thisRule, ignored := conf.ChooseACLCategoryAction(reqACLs, categories, possibleActions...)
	if r.Method == "CONNECT" && conf.TLSReady && thisRule.Action == "" {
		// If the result is unclear, go ahead and start to bump the connection.
		// The ACLs will be checked one more time anyway.
		thisRule.Action = "ssl-bump"
	}

	switch thisRule.Action {
	case "require-auth":
		conf.send407(w)
		log.Printf("Missing required proxy authentication from %v to %v", r.RemoteAddr, r.URL)
		return
	case "block":
		conf.showBlockPage(w, r, user, tally, scores, thisRule)
		logAccess(r, nil, 0, false, user, tally, scores, thisRule, "", ignored, userAgent)
		return
	case "block-invisible":
		showInvisibleBlock(w)
		logAccess(r, nil, 0, false, user, tally, scores, thisRule, "", ignored, userAgent)
		return
	case "ssl-bump":
		// Take over the raw connection and perform a man-in-the-middle TLS
		// handshake so the decrypted traffic can be filtered.
		conn, err := newHijackedConn(w)
		if err != nil {
			fmt.Fprintln(conn, "HTTP/1.1 500 Internal Server Error")
			fmt.Fprintln(conn)
			fmt.Fprintln(conn, err)
			conn.Close()
			return
		}
		fmt.Fprint(conn, "HTTP/1.1 200 Connection Established\r\n\r\n")
		SSLBump(conn, r.URL.Host, user, authUser)
		return
	}

	// Requests addressed to the proxy's own internal server.
	if r.Host == localServer {
		conf.ServeMux.ServeHTTP(w, r)
		return
	}

	// Plain CONNECT (no bumping): open a tunnel and splice bytes directly.
	if r.Method == "CONNECT" {
		conn, err := newHijackedConn(w)
		if err != nil {
			fmt.Fprintln(conn, "HTTP/1.1 500 Internal Server Error")
			fmt.Fprintln(conn)
			fmt.Fprintln(conn, err)
			conn.Close()
			return
		}
		fmt.Fprint(conn, "HTTP/1.1 200 Connection Established\r\n\r\n")
		logAccess(r, nil, 0, false, user, tally, scores, thisRule, "", ignored, userAgent)
		connectDirect(conn, r.URL.Host, nil)
		return
	}

	if r.Header.Get("Upgrade") == "websocket" {
		h.makeWebsocketConnection(w, r)
		return
	}

	r.Header.Add("Via", r.Proto+" Redwood")
	r.Header.Add("X-Forwarded-For", client)

	// Decide up front whether we may gzip the response to the client; the
	// upstream request is always sent without Accept-Encoding so the body
	// arrives uncompressed and can be scanned.
	gzipOK := !conf.DisableGZIP && strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") && !lanAddress(client)
	r.Header.Del("Accept-Encoding")

	urlChanged := conf.changeQuery(r.URL)

	if !urlChanged {
		// Rebuild the URL in a way that will preserve which characters are escaped
		// and which aren't, for compatibility with broken servers.
		rawURL := r.RequestURI
		if strings.HasPrefix(rawURL, r.URL.Scheme) {
			// Strip "scheme://host" down to the path portion.
			rawURL = rawURL[len(r.URL.Scheme):]
			rawURL = strings.TrimPrefix(rawURL, "://")
			slash := strings.Index(rawURL, "/")
			if slash == -1 {
				rawURL = "/"
			} else {
				rawURL = rawURL[slash:]
			}
		}
		// The query string is carried separately in r.URL; drop it here.
		q := strings.Index(rawURL, "?")
		if q != -1 {
			rawURL = rawURL[:q]
		}
		if strings.HasPrefix(rawURL, "//") {
			// The path should start with a single slash not two.
			rawURL = rawURL[1:]
		}
		r.URL.Opaque = rawURL
	}

	proxied := false
	var rt http.RoundTripper
	if h.rt == nil {
		if r.URL.Opaque != "" && transport.Proxy != nil {
			if p, _ := transport.Proxy(r); p != nil {
				// If the request is going through a proxy, the host needs to be
				// included in the opaque element.
				r.URL.Opaque = "//" + r.URL.Host + r.URL.Opaque
				proxied = true
			}
		}
		rt = &transport
	} else {
		rt = h.rt
	}

	// Don't leak the client's proxy credentials upstream unless we are
	// ourselves forwarding through a parent proxy.
	if !proxied {
		r.Header.Del("Proxy-Authorization")
	}

	resp, err := rt.RoundTrip(r)
	r.URL.Opaque = ""
	if err != nil {
		http.Error(w, err.Error(), http.StatusServiceUnavailable)
		log.Printf("error fetching %s: %s", r.URL, err)
		logAccess(r, nil, 0, false, user, tally, scores, thisRule, "", ignored, userAgent)
		return
	}
	defer resp.Body.Close()

	// Prevent switching to QUIC.
	resp.Header.Del("Alternate-Protocol")

	// Remember the server's declared type; fixContentType may rewrite it
	// for scanning purposes, and we restore the original before relaying.
	originalContentType := resp.Header.Get("Content-Type")
	fixContentType(resp)

	respACLs := conf.ACLs.responseACLs(resp)
	acls := unionACLSets(reqACLs, respACLs)

	// Second ACL pass, now that response headers are available.
	thisRule, ignored = conf.ChooseACLCategoryAction(acls, categories, "allow", "block", "block-invisible", "hash-image", "phrase-scan")
	if thisRule.Action == "" {
		thisRule.Action = "allow"
	}

	switch thisRule.Action {
	case "allow":
		// No content scanning needed: stream the body straight through.
		resp.Header.Set("Content-Type", originalContentType)
		copyResponseHeader(w, resp)
		n, err := io.Copy(w, resp.Body)
		if err != nil {
			log.Printf("error while copying response (URL: %s): %s", r.URL, err)
		}
		logAccess(r, resp, int(n), false, user, tally, scores, thisRule, "", ignored, userAgent)
		return
	case "block":
		conf.showBlockPage(w, r, user, tally, scores, thisRule)
		logAccess(r, resp, 0, false, user, tally, scores, thisRule, "", ignored, userAgent)
		return
	case "block-invisible":
		showInvisibleBlock(w)
		logAccess(r, resp, 0, false, user, tally, scores, thisRule, "", ignored, userAgent)
		return
	}

	// Content scanning required: buffer up to 1 MB of the body.
	lr := &io.LimitedReader{
		R: resp.Body,
		N: 1e6,
	}
	content, err := ioutil.ReadAll(lr)
	if err != nil {
		log.Printf("error while reading response body (URL: %s): %s", r.URL, err)
	}
	if lr.N == 0 {
		// Body exceeds the scan limit: give up on filtering and relay the
		// buffered prefix plus the remainder of the stream unmodified.
		log.Println("response body too long to filter:", r.URL)
		resp.Header.Set("Content-Type", originalContentType)
		var dest io.Writer = w
		if gzipOK {
			resp.Header.Set("Content-Encoding", "gzip")
			resp.Header.Del("Content-Length")
			gzw := gzip.NewWriter(w)
			defer gzw.Close()
			dest = gzw
		}
		copyResponseHeader(w, resp)
		dest.Write(content)
		n, err := io.Copy(dest, resp.Body)
		if err != nil {
			log.Printf("error while copying response (URL: %s): %s", r.URL, err)
		}
		logAccess(r, resp, int(n)+len(content), false, user, tally, scores, ACLActionRule{Action: "allow", Needed: []string{"too-long-to-filter"}}, "", ignored, userAgent)
		return
	}

	modified := false
	pageTitle := ""

	switch thisRule.Action {
	case "phrase-scan":
		contentType := resp.Header.Get("Content-Type")
		_, cs, _ := charset.DetermineEncoding(content, contentType)
		if strings.Contains(contentType, "html") {
			var doc *html.Node
			if conf.LogTitle {
				// Parse only to extract <title> for the access log.
				doc, err = parseHTML(content, cs)
				if err != nil {
					log.Printf("Error parsing HTML from %s: %s", r.URL, err)
				} else {
					t := titleSelector.MatchFirst(doc)
					if t != nil {
						if titleText := t.FirstChild; titleText != nil && titleText.Type == html.TextNode {
							pageTitle = titleText.Data
						}
					}
				}
			}
			// pruneContent may rewrite the page in place; if so it is
			// re-encoded as UTF-8.
			modified = conf.pruneContent(r.URL, &content, cs, acls, doc)
			if modified {
				resp.Header.Set("Content-Type", "text/html; charset=utf-8")
				cs = "utf-8"
				resp.Header.Del("Content-Length")
			}
		}
		conf.scanContent(content, contentType, cs, tally)
	case "hash-image":
		// Perceptual-hash the image and tally matches against the
		// configured blocklist hashes.
		img, _, err := image.Decode(bytes.NewReader(content))
		if err != nil {
			log.Printf("Error decoding image from %v: %v", r.URL, err)
			break
		}
		hash := dhash.New(img)
		for _, h := range conf.ImageHashes {
			if dhash.Distance(hash, h) <= conf.DhashThreshold {
				tally[rule{imageHash, h.String()}]++
			}
		}
	}

	// Final ACL pass, with scores updated by the content scan.
	scores = conf.categoryScores(tally)
	categories = conf.significantCategories(scores)
	thisRule, ignored = conf.ChooseACLCategoryAction(acls, categories, "allow", "block", "block-invisible")
	if thisRule.Action == "" {
		thisRule.Action = "allow"
	}

	switch thisRule.Action {
	case "block":
		conf.showBlockPage(w, r, user, tally, scores, thisRule)
		logAccess(r, resp, len(content), modified, user, tally, scores, thisRule, pageTitle, ignored, userAgent)
		return
	case "block-invisible":
		showInvisibleBlock(w)
		logAccess(r, resp, len(content), modified, user, tally, scores, thisRule, pageTitle, ignored, userAgent)
		return
	}

	if !modified {
		resp.Header.Set("Content-Type", originalContentType)
	}

	// Gzip only when worthwhile; tiny bodies aren't worth the overhead.
	if gzipOK && len(content) > 1000 {
		resp.Header.Set("Content-Encoding", "gzip")
		resp.Header.Del("Content-Length")
		copyResponseHeader(w, resp)
		gzw := gzip.NewWriter(w)
		gzw.Write(content)
		gzw.Close()
	} else {
		copyResponseHeader(w, resp)
		w.Write(content)
	}

	logAccess(r, resp, len(content), modified, user, tally, scores, thisRule, pageTitle, ignored, userAgent)
}
// Exchange is a simple utility struct to gather the elements
// manipulated while communicating with a client within a session.
type Exchange struct {
	w   http.ResponseWriter // response being built
	r   *http.Request       // incoming request
	ps  map[string]string   // request parameters, keyed by name
	s   session.Session     // session associated with this exchange
	cfg config.Rest         // REST configuration in effect
	err error               // first error encountered; sticky (later errors don't overwrite it in V)
	not *notifier.Notifier
}

// Create a Pool that contains previously used Writers and
// can create new ones if we run out.
// Callers must Reset the writer onto a destination before use.
var zippers = sync.Pool{New: func() interface{} {
	return gzip.NewWriter(nil)
}}

// V returns the value of the given parameter.
// If the parameter is missing, it records an error on the Exchange
// (only if no earlier error exists) and returns the empty string.
func (ex *Exchange) V(id string) string {
	v, ok := ex.ps[id]
	if !ok && ex.err == nil {
		ex.err = errors.New("Missing request parameter '" + id + "'")
	}
	return v
}

// outJSON records e (if non-nil) as the exchange's error before emitting j.
// NOTE(review): definition continues beyond this chunk.
func (ex *Exchange) outJSON(j interface{}, e error) error {
	if e != nil {
		ex.err = e
	}
// DetectCompressionType method detects the compression type
// from header "Accept-Encoding".
// It parses the comma-separated encodings (with optional ";q=" weights),
// picks the acceptable encoding that ranks earliest in compressionTypes
// among those with the highest q, and initializes the matching compress
// writer on resp.Out. It does nothing unless "results.compressed" is
// enabled, and leaves the writer unset when no encoding is acceptable.
func (c *CompressResponseWriter) DetectCompressionType(req *Request, resp *Response) {
	if Config.BoolDefault("results.compressed", false) {
		acceptedEncodings := strings.Split(req.Request.Header.Get("Accept-Encoding"), ",")

		largestQ := 0.0
		// Sentinel: one past the last index means "nothing chosen yet".
		chosenEncoding := len(compressionTypes)

		// I have fixed one edge case for issue #914
		// But it's better to cover all possible edge cases or
		// Adapt to https://github.com/golang/gddo/blob/master/httputil/header/header.go#L172
		for _, encoding := range acceptedEncodings {
			encoding = strings.TrimSpace(encoding)
			encodingParts := strings.SplitN(encoding, ";", 2)

			// If we are the format "gzip;q=0.8"
			if len(encodingParts) > 1 {
				q := strings.TrimSpace(encodingParts[1])
				// Malformed weight: skip this encoding entirely.
				if len(q) == 0 || !strings.HasPrefix(q, "q=") {
					continue
				}

				// Strip off the q=
				num, err := strconv.ParseFloat(q[2:], 32)
				if err != nil {
					continue
				}

				// q=0 means "not acceptable", so require num > 0.
				if num >= largestQ && num > 0 {
					if encodingParts[0] == "*" {
						// Wildcard: take our most-preferred type.
						chosenEncoding = 0
						largestQ = num
						continue
					}
					for i, encoding := range compressionTypes {
						if encoding == encodingParts[0] {
							// On equal q, prefer the earlier (more preferred) type.
							if i < chosenEncoding {
								largestQ = num
								chosenEncoding = i
							}
							break
						}
					}
				}
			} else {
				// If we can accept anything, chose our preferred method.
				if encodingParts[0] == "*" {
					chosenEncoding = 0
					largestQ = 1
					break
				}
				// This is for just plain "gzip"
				for i, encoding := range compressionTypes {
					if encoding == encodingParts[0] {
						if i < chosenEncoding {
							largestQ = 1.0
							chosenEncoding = i
						}
						break
					}
				}
			}
		}

		// Nothing acceptable was advertised; respond uncompressed.
		if largestQ == 0 {
			return
		}

		c.compressionType = compressionTypes[chosenEncoding]

		switch c.compressionType {
		case "gzip":
			c.compressWriter = gzip.NewWriter(resp.Out)
		case "deflate":
			c.compressWriter = zlib.NewWriter(resp.Out)
		}
	}
}
"io/ioutil" "net" "net/http" "strings" "sync" "github.com/corestoreio/csfw/net/ctxhttp" "github.com/corestoreio/csfw/net/httputil" "github.com/klauspost/compress/flate" "github.com/klauspost/compress/gzip" "golang.org/x/net/context" ) var gzWriterPool = sync.Pool{ New: func() interface{} { return gzip.NewWriter(ioutil.Discard) }, } var defWriterPool = sync.Pool{ New: func() interface{} { w, err := flate.NewWriter(ioutil.Discard, 2) if err != nil { panic(err) } return w }, } type compressWriter struct { io.Writer