// Parse the trailer header
func fixTrailer(header http.Header, te []string) (http.Header, error) {
    vv, ok := header["Trailer"]
    if !ok {
        return nil, nil
    }
    header.Del("Trailer")

    trailer := make(http.Header)
    var err error
    for _, v := range vv {
        foreachHeaderElement(v, func(key string) {
            key = http.CanonicalHeaderKey(key)
            switch key {
            case "Transfer-Encoding", "Trailer", "Content-Length":
                if err == nil {
                    err = fmt.Errorf("bad trailer key: %s", key)
                    return
                }
            }
            trailer[key] = nil
        })
    }
    if err != nil {
        return nil, err
    }
    if len(trailer) == 0 {
        return nil, nil
    }
    if !chunked(te) {
        // Trailer and no chunking
        return nil, http.ErrUnexpectedTrailer
    }
    return trailer, nil
}

// Parse the trailer header
func fixTrailer(header http.Header, te []string) (http.Header, error) {
    raw := get(header, "Trailer")
    if raw == "" {
        return nil, nil
    }
    header.Del("Trailer")

    trailer := make(http.Header)
    keys := strings.Split(raw, ",")
    for _, key := range keys {
        key = http.CanonicalHeaderKey(strings.TrimSpace(key))
        switch key {
        case "Transfer-Encoding", "Trailer", "Content-Length":
            return nil, &badStringError{"bad trailer key", key}
        }
        // Register the announced trailer key; the values are filled in when
        // the trailer itself is read. (Calling trailer.Del here would leave
        // the map empty and make the function always return nil.)
        trailer[key] = nil
    }
    if len(trailer) == 0 {
        return nil, nil
    }
    if !chunked(te) {
        // Trailer and no chunking
        return nil, http.ErrUnexpectedTrailer
    }
    return trailer, nil
}

func copyHeaders(dst, src http.Header) {
    for k := range dst {
        dst.Del(k)
    }
    for k, vs := range src {
        for _, v := range vs {
            dst.Add(k, v)
        }
    }
}

// Removes the Money header from the http.Header
func DelAllMNYHeaders(header http.Header) http.Header {
    for h := range header {
        if strings.EqualFold(h, HEADER) {
            header.Del(h)
            return header
        }
    }
    return header
}

// copyHeader copies headers from one http.Header to another.
// http://golang.org/src/pkg/net/http/httputil/reverseproxy.go#L72
func copyHeader(dst http.Header, src http.Header) {
    for k := range dst {
        dst.Del(k)
    }
    for k, vv := range src {
        for _, v := range vv {
            dst.Add(k, v)
        }
    }
}

func copyHeaders(dst, src http.Header) {
    for k := range dst {
        dst.Del(k)
    }
    for k, v := range src {
        vCopy := make([]string, len(v))
        copy(vCopy, v)
        dst[k] = vCopy
    }
}

// replaceHeaderValues replaces header values in the target with those in the
// source. Headers present in the target but absent from the source are left
// untouched; if that is not what you want, don't call this function.
func replaceHeaderValues(target *http.Header, source *http.Header) {
    if target == nil {
        // guard against a nil target pointer; the copied values are discarded
        // in that case because the caller's pointer is not updated
        target = new(http.Header)
    }
    for name, vals := range *source {
        target.Del(name)
        for _, val := range vals {
            target.Add(name, val)
        }
    }
}

func recalculateContentLength(header http.Header) {
    contentLengthStr := header.Get("Content-Length")
    if contentLengthStr == "" {
        return
    }
    contentLength, err := strconv.Atoi(contentLengthStr)
    if err != nil {
        header.Del("Content-Length")
        return
    }
    contentLength += len(flvHeader)
    header.Set("Content-Length", strconv.Itoa(contentLength))
}

func mutateHeadersByRules(headers, rules http.Header, repl httpserver.Replacer) {
    for ruleField, ruleValues := range rules {
        if strings.HasPrefix(ruleField, "+") {
            for _, ruleValue := range ruleValues {
                headers.Add(strings.TrimPrefix(ruleField, "+"), repl.Replace(ruleValue))
            }
        } else if strings.HasPrefix(ruleField, "-") {
            headers.Del(strings.TrimPrefix(ruleField, "-"))
        } else if len(ruleValues) > 0 {
            headers.Set(ruleField, repl.Replace(ruleValues[len(ruleValues)-1]))
        }
    }
}

func (hr *headersRewrite) rewrite(headers http.Header) {
    for _, key := range hr.RemoveHeaders {
        headers.Del(key)
    }
    for key, values := range hr.AddHeaders {
        addValues(headers, key, values)
    }
    for key, values := range hr.SetHeaders {
        headers.Del(key)
        addValues(headers, key, values)
    }
}

func removeHopByHopHeaders(header http.Header) {
    // Additional hop-by-hop headers may be specified in `Connection` headers.
    // http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-14#section-9.1
    for _, vs := range header["Connection"] {
        for _, v := range strings.Split(vs, ",") {
            k := http.CanonicalHeaderKey(strings.TrimSpace(v))
            header.Del(k)
        }
    }
    for _, k := range HopByHopHeaders {
        header.Del(k)
    }
}

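The HopByHopHeaders slice referenced above is defined elsewhere in its package. A plausible definition, following the hop-by-hop field list from RFC 2616 section 13.5.1, might look like the sketch below; the exact contents in the original codebase may differ.

// Hypothetical definition for illustration only; the hop-by-hop fields listed
// here are the ones named in RFC 2616 §13.5.1, canonicalized for http.Header.
var HopByHopHeaders = []string{
    "Connection",
    "Keep-Alive",
    "Proxy-Authenticate",
    "Proxy-Authorization",
    "Te", // canonicalized form of "TE"
    "Trailers",
    "Transfer-Encoding",
    "Upgrade",
}
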
// Determine the expected body length, using RFC 2616 Section 4.4. This
// function is not a method, because ultimately it should be shared by
// ReadResponse and ReadRequest.
func fixLength(isResponse bool, status int, requestMethod string, header http.Header, te []string) (int64, error) {
    // Logic based on response type or status
    if noBodyExpected(requestMethod) {
        return 0, nil
    }
    if status/100 == 1 {
        return 0, nil
    }
    switch status {
    case 204, 304:
        return 0, nil
    }

    // Logic based on Transfer-Encoding
    if chunked(te) {
        return -1, nil
    }

    // Logic based on Content-Length
    cl := strings.TrimSpace(get(header, "Content-Length"))
    if cl != "" {
        n, err := strconv.ParseInt(cl, 10, 64)
        if err != nil || n < 0 {
            return -1, &badStringError{"bad Content-Length", cl}
        }
        return n, nil
    } else {
        header.Del("Content-Length")
    }

    if !isResponse && requestMethod == "GET" {
        // RFC 2616 doesn't explicitly permit nor forbid an
        // entity-body on a GET request so we permit one if
        // declared, but we default to 0 here (not -1 below)
        // if there's no mention of a body.
        return 0, nil
    }

    // Logic based on media type. The purpose of the following code is just
    // to detect whether the unsupported "multipart/byteranges" is being
    // used. A proper Content-Type parser is needed in the future.
    if strings.Contains(strings.ToLower(get(header, "Content-Type")), "multipart/byteranges") {
        return -1, http.ErrNotSupported
    }

    // Body-EOF logic based on other methods (like closing, or chunked coding)
    return -1, nil
}

// SetHeaders sets the public headers with an optional must-revalidate header
func (p PublicCacheControl) SetHeaders(headers http.Header) {
    cacheControlValue := fmt.Sprintf("public, max-age=%v, s-maxage=%v", p.MaxAgeInSeconds, p.MaxAgeInSeconds)
    if p.MustReValidate {
        cacheControlValue = fmt.Sprintf("%s, must-revalidate", cacheControlValue)
    }
    headers.Set("Cache-Control", cacheControlValue)

    // delete the Pragma directive, because the only valid value in HTTP is
    // "no-cache"
    headers.Del("Pragma")
    if headers.Get("Last-Modified") == "" {
        SetLastModifiedHeader(headers, time.Time{})
    }
}

func copyHeader(dst, src http.Header) {
    // Copy Headers from src to dst
    for k, vv := range src {
        for _, v := range vv {
            dst.Add(k, v)
        }
    }
    // Remove hop-by-hop headers and problem headers
    for h := range hopHeaders {
        dst.Del(h)
    }
    for h := range skipHeaders {
        dst.Del(h)
    }
}

func copyHeader(dst, src http.Header) {
    for k, vv := range src {
        if _, ok := dst[k]; ok {
            // skip some predefined headers
            // see https://github.com/mholt/caddy/issues/1086
            if _, shouldSkip := skipHeaders[k]; shouldSkip {
                continue
            }
            // otherwise, overwrite
            dst.Del(k)
        }
        for _, v := range vv {
            dst.Add(k, v)
        }
    }
}

// FixBadFraming makes a best effort to fix inconsistencies in the request such
// as multiple Content-Lengths or the lack of Content-Length and improper
// Transfer-Encoding. If it is unable to determine a proper resolution it
// returns ErrBadFraming.
//
// http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-14#section-3.3
func FixBadFraming(header http.Header) error {
    cls := header["Content-Length"]
    if len(cls) > 0 {
        var length string

        // Iterate over all Content-Length headers, splitting any we find with
        // commas, and check that all Content-Lengths are equal.
        for _, ls := range cls {
            for _, l := range strings.Split(ls, ",") {
                // First length, set it as the canonical Content-Length.
                if length == "" {
                    length = strings.TrimSpace(l)
                    continue
                }

                // Mismatched Content-Lengths.
                if length != strings.TrimSpace(l) {
                    return ErrBadFraming
                }
            }
        }

        // All Content-Lengths are equal, remove extras and set it to the canonical
        // value.
        header.Set("Content-Length", length)
    }

    tes := header["Transfer-Encoding"]
    if len(tes) > 0 {
        // Extract the last Transfer-Encoding value, and split on commas.
        last := strings.Split(tes[len(tes)-1], ",")

        // Check that the last, potentially comma-delimited, value is "chunked",
        // else we have no way to determine when the request is finished.
        if strings.TrimSpace(last[len(last)-1]) != "chunked" {
            return ErrBadFraming
        }

        // Transfer-Encoding "chunked" takes precedence over
        // Content-Length.
        header.Del("Content-Length")
    }
    return nil
}

// Determine whether to hang up after sending a request and body, or
// receiving a response and body
// 'header' is the request headers
func shouldClose(major, minor int, header http.Header) bool {
    if major < 1 {
        return true
    } else if major == 1 && minor == 0 {
        if !strings.Contains(strings.ToLower(get(header, "Connection")), "keep-alive") {
            return true
        }
        return false
    } else {
        // TODO: Should split on commas, toss surrounding white space,
        // and check each field.
        if strings.ToLower(get(header, "Connection")) == "close" {
            header.Del("Connection")
            return true
        }
    }
    return false
}

func mergeHeaders(destination http.Header, headerString string) (err error) {
    headerString = strings.TrimSpace(headerString)
    headerString += "\n\n"
    headerReader := bufio.NewReader(strings.NewReader(headerString))
    headers, err := textproto.NewReader(headerReader).ReadMIMEHeader()
    if err != nil {
        return
    }
    for key, values := range headers {
        destination.Del(key)
        for _, value := range values {
            destination.Add(key, value)
        }
    }
    return
}

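A minimal usage sketch for mergeHeaders; the header names and values below are made up for illustration. Because ReadMIMEHeader canonicalizes keys, existing entries are replaced case-insensitively and new ones are added.

// Usage sketch with hypothetical header values.
dst := http.Header{"X-Existing": {"old"}}
err := mergeHeaders(dst, "x-existing: new\r\nX-Added: 1")
// err == nil; dst is now: X-Existing: [new], X-Added: [1]
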
// Determine whether to hang up after sending a request and body, or
// receiving a response and body
// 'header' is the request headers
func shouldClose(major, minor int, header http.Header, removeCloseHeader bool) bool {
    if major < 1 {
        return true
    } else if major == 1 && minor == 0 {
        vv := header["Connection"]
        if headerValuesContainsToken(vv, "close") || !headerValuesContainsToken(vv, "keep-alive") {
            return true
        }
        return false
    } else {
        if headerValuesContainsToken(header["Connection"], "close") {
            if removeCloseHeader {
                header.Del("Connection")
            }
            return true
        }
    }
    return false
}

func TestBadFramingTransferEncodingAndContentLength(t *testing.T) {
    header := http.Header{
        "Transfer-Encoding": []string{"gzip, chunked"},
        "Content-Length":    []string{"42"},
    }
    if err := FixBadFraming(header); err != nil {
        t.Errorf("FixBadFraming(): got %v, want no error", err)
    }
    if _, ok := header["Content-Length"]; ok {
        t.Fatalf("header[%q]: got ok, want !ok", "Content-Length")
    }

    header.Set("Transfer-Encoding", "gzip, identity")
    header.Del("Content-Length")
    if got, want := FixBadFraming(header), ErrBadFraming; got != want {
        t.Errorf("FixBadFraming(): got %v, want %v", got, want)
    }
}

//
//
//type Header map[string][]string
func LHttpHeader(L *lua.LState, h http.Header) *lua.LTable {
    t := L.NewTable()
    // for k, vs := range h {
    // 	t.RawSetString(k, LStringSlice(L, vs))
    // }
    L.SetFuncs(t, map[string]lua.LGFunction{
        //func (h Header) Add(key, value string)
        "add": func(L *lua.LState) int {
            h.Add(L.CheckString(1), L.CheckString(2))
            return 0
        },
        //func (h Header) Del(key string)
        "del": func(L *lua.LState) int {
            h.Del(L.CheckString(1))
            return 0
        },
        //func (h Header) Get(key string) string
        "get": func(L *lua.LState) int {
            L.Push(lua.LString(h.Get(L.CheckString(1))))
            return 1
        },
        //func (h Header) Set(key, value string)
        "set": func(L *lua.LState) int {
            h.Set(L.CheckString(1), L.CheckString(2))
            return 0
        },
        //func (h Header) Write(w io.Writer) error
        "write": func(L *lua.LState) int {
            L.Push(lua.LNil)
            return 1
        },
        //func (h Header) WriteSubset(w io.Writer, exclude map[string]bool) error
        "write_subset": func(L *lua.LState) int {
            L.Push(lua.LNil)
            return 1
        },
    })
    return t
}

func createHeadersByRules(rules http.Header, base http.Header, repl httpserver.Replacer) http.Header {
    newHeaders := make(http.Header)
    for header, values := range rules {
        if strings.HasPrefix(header, "+") {
            header = strings.TrimLeft(header, "+")
            add(newHeaders, header, base[header])
            applyEach(values, repl.Replace)
            add(newHeaders, header, values)
        } else if strings.HasPrefix(header, "-") {
            base.Del(strings.TrimLeft(header, "-"))
        } else if _, ok := base[header]; ok {
            applyEach(values, repl.Replace)
            for _, v := range values {
                newHeaders.Set(header, v)
            }
        } else {
            applyEach(values, repl.Replace)
            add(newHeaders, header, values)
            add(newHeaders, header, base[header])
        }
    }
    return newHeaders
}

// Determine the expected body length, using RFC 2616 Section 4.4. This
// function is not a method, because ultimately it should be shared by
// ReadResponse and ReadRequest.
func fixLength(isResponse bool, status int, requestMethod string, h http.Header) (int64, error) {
    // Logic based on response type or status
    if noBodyExpected(requestMethod) {
        return 0, nil
    }
    if status/100 == 1 {
        return 0, nil
    }
    switch status {
    case 204, 304:
        return 0, nil
    }

    // Logic based on Content-Length
    cl := strings.TrimSpace(h.Get("Content-Length"))
    if cl != "" {
        n, err := parseContentLength(cl)
        if err != nil {
            return -1, err
        }
        return n, nil
    } else {
        h.Del("Content-Length")
    }

    if !isResponse && requestMethod == "GET" {
        // RFC 2616 doesn't explicitly permit nor forbid an
        // entity-body on a GET request so we permit one if
        // declared, but we default to 0 here (not -1 below)
        // if there's no mention of a body.
        return 0, nil
    }

    // Body-EOF logic based on other methods (like closing, or chunked coding)
    return -1, nil
}

// FetchConfigWithHeader fetches a raw config from the provided URL and returns
// the response body on success or nil on failure. The caller must also provide
// a list of acceptable HTTP status codes and headers. If the response's status
// code is not in the provided list, it is considered a failure. If the status
// is accepted but is not 200 OK, an empty (vs. nil) config is returned. The
// provided headers are merged with a set of default headers.
func (c HttpClient) FetchConfigWithHeader(url string, header http.Header, acceptedStatuses ...int) []byte {
    var config []byte

    c.logger.LogOp(func() error {
        reqHeader := http.Header{
            "Accept-Encoding": []string{"identity"},
            "Accept":          []string{"application/vnd.coreos.ignition+json; version=2.0.0, application/vnd.coreos.ignition+json; version=1; q=0.5, */*; q=0.1"},
        }
        for key, values := range header {
            reqHeader.Del(key)
            for _, value := range values {
                reqHeader.Add(key, value)
            }
        }

        data, status, err := c.GetWithHeader(url, reqHeader)
        if err != nil {
            return err
        }

        for _, acceptedStatus := range acceptedStatuses {
            if status == acceptedStatus {
                if status == http.StatusOK {
                    config = data
                } else {
                    config = []byte{}
                }
                return nil
            }
        }

        return fmt.Errorf("%s", http.StatusText(status))
    }, "fetching config from %q", url)

    return config
}

// cleanHeaders will unset some headers that
// may cause us some issues
func cleanHeaders(headers http.Header) http.Header {
    headers.Del("Content-Length")    // Go HTTP Library won't let us send an "off" sized Content-Length header
    headers.Del("Go-Content-Length") // Same idea. cleanHeaders is called before we strip the "Go-" out of the header

    list := []string{
        "Accept-Charset",
        "Accept-Encoding",
        "Access-Control-Request-Headers",
        "Access-Control-Request-Method",
        "Connection",
        "Cookie",
        "Cookie 2",
        "Content-Transfer-Encoding",
        "Date",
        "Expect",
        "Host",
        "Keep-Alive",
        "Origin",
        "Referer",
        "Te",
        "Trailer",
        "Transfer-Encoding",
        "Upgrade",
        "User-Agent",
        "Via",
    }

    for _, k := range list {
        key := "Go-" + k
        if val, ok := headers[key]; ok {
            headers.Set(k, val[0])
            headers.Del(key)
        }
    }
    return headers
}

// Compress uses zlib compression to compress the provided
// headers, according to the SPDY specification of the given version.
func (c *compressor) Compress(h http.Header) ([]byte, error) {
    c.Lock()
    defer c.Unlock()

    // Ensure the buffer is prepared.
    if c.buf == nil {
        c.buf = new(bytes.Buffer)
    } else {
        c.buf.Reset()
    }

    // Same for the compressor.
    if c.w == nil {
        var err error
        switch c.version {
        case 2:
            select {
            case c.w = <-zlibV2Writers:
                c.w.Reset(c.buf)
            default:
                c.w, err = zlib.NewWriterLevelDict(c.buf, CompressionLevel, HeaderDictionaryV2)
            }
        case 3:
            select {
            case c.w = <-zlibV3Writers:
                c.w.Reset(c.buf)
            default:
                c.w, err = zlib.NewWriterLevelDict(c.buf, CompressionLevel, HeaderDictionaryV3)
            }
        default:
            err = versionError
        }
        if err != nil {
            return nil, err
        }
    }

    var size int // Size of length values.
    switch c.version {
    case 2:
        size = 2
    case 3:
        size = 4
    default:
        return nil, versionError
    }

    // Remove invalid headers.
    h.Del("Connection")
    h.Del("Keep-Alive")
    h.Del("Proxy-Connection")
    h.Del("Transfer-Encoding")

    length := size                   // The 4-byte or 2-byte number of name/value pairs.
    pairs := make(map[string]string) // Used to store the validated, joined headers.
    for name, values := range h {
        // Ignore invalid names.
        if _, ok := pairs[name]; ok { // We've already seen this name.
            return nil, errors.New("Error: Duplicate header name discovered.")
        }
        if name == "" { // Ignore empty names.
            continue
        }

        // Multiple values are separated by a single null byte.
        pairs[name] = strings.Join(values, "\x00")

        // +size for len(name), +size for len(values).
        length += len(name) + size + len(pairs[name]) + size
    }

    // Uncompressed data.
    out := make([]byte, length)

    // Current offset into out.
    var offset uint32

    // Write the number of name/value pairs.
    num := uint32(len(pairs))
    switch c.version {
    case 3:
        out[0] = byte(num >> 24)
        out[1] = byte(num >> 16)
        out[2] = byte(num >> 8)
        out[3] = byte(num)
        offset = 4
    case 2:
        out[0] = byte(num >> 8)
        out[1] = byte(num)
        offset = 2
    }

    // For each name/value pair...
    for name, value := range pairs {
        // The length of the name.
        nLen := uint32(len(name))
        switch c.version {
        case 3:
            out[offset+0] = byte(nLen >> 24)
            out[offset+1] = byte(nLen >> 16)
            out[offset+2] = byte(nLen >> 8)
            out[offset+3] = byte(nLen)
            offset += 4
        case 2:
            out[offset+0] = byte(nLen >> 8)
            out[offset+1] = byte(nLen)
            offset += 2
        }

        // The name itself.
        copy(out[offset:], []byte(strings.ToLower(name)))
        offset += nLen

        // The length of the value.
        vLen := uint32(len(value))
        switch c.version {
        case 3:
            out[offset+0] = byte(vLen >> 24)
            out[offset+1] = byte(vLen >> 16)
            out[offset+2] = byte(vLen >> 8)
            out[offset+3] = byte(vLen)
            offset += 4
        case 2:
            out[offset+0] = byte(vLen >> 8)
            out[offset+1] = byte(vLen)
            offset += 2
        }

        // The value itself.
        copy(out[offset:], []byte(value))
        offset += vLen
    }

    // Compress.
    err := WriteExactly(c.w, out)
    if err != nil {
        return nil, err
    }
    c.w.Flush()

    return c.buf.Bytes(), nil
}

// ServeHTTP implements the http.Handler that proxies WebSocket connections.
func (w *WebsocketProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
    if w.Backend == nil {
        log.Errorf("Websocketproxy: backend function is not defined")
        http.Error(rw, "Backend not found", http.StatusInternalServerError)
        return
    }

    backendURL := w.Backend(req)
    if backendURL == nil {
        log.Errorf("Websocketproxy: backend URL is nil")
        http.Error(rw, "Backend URL is nil", http.StatusInternalServerError)
        return
    }

    dialer := w.Dialer
    if w.Dialer == nil {
        dialer = DefaultDialer
    }

    // Pass headers from the incoming request to the dialer to forward them to
    // the final destinations.
    requestHeader := http.Header{}
    requestHeader.Add("Origin", req.Header.Get("Origin"))
    for _, prot := range req.Header[http.CanonicalHeaderKey("Sec-WebSocket-Protocol")] {
        requestHeader.Add("Sec-WebSocket-Protocol", prot)
    }
    for _, cookie := range req.Header[http.CanonicalHeaderKey("Cookie")] {
        requestHeader.Add("Cookie", cookie)
    }
    for _, auth := range req.Header[http.CanonicalHeaderKey("Authorization")] {
        requestHeader.Add("Authorization", auth)
    }

    // Pass X-Forwarded-For headers too, code below is a part of
    // httputil.ReverseProxy. See http://en.wikipedia.org/wiki/X-Forwarded-For
    // for more information
    // TODO: use RFC7239 http://tools.ietf.org/html/rfc7239
    if clientIP, _, err := net.SplitHostPort(req.RemoteAddr); err == nil {
        // If we aren't the first proxy retain prior
        // X-Forwarded-For information as a comma+space
        // separated list and fold multiple headers into one.
        if prior, ok := req.Header["X-Forwarded-For"]; ok {
            clientIP = strings.Join(prior, ", ") + ", " + clientIP
        }
        requestHeader.Set("X-Forwarded-For", clientIP)
    }

    // Set the originating protocol of the incoming HTTP request. The SSL might
    // be terminated on our side, and since we are proxying, adding this header
    // is helpful for applications on the backend.
    requestHeader.Set("X-Forwarded-Proto", "http")
    if req.TLS != nil {
        requestHeader.Set("X-Forwarded-Proto", "https")
    }

    // frontend Origin != backend Origin
    requestHeader.Del("Origin")

    // Connect to the backend URL, also pass the headers we get from the request
    // together with the Forwarded headers we prepared above.
    // TODO: support multiplexing on the same backend connection instead of
    // opening a new TCP connection for each request. This should be
    // optional:
    // http://tools.ietf.org/html/draft-ietf-hybi-websocket-multiplexing-01
    connBackend, resp, err := dialer.Dial(backendURL.String(), requestHeader)
    if err != nil {
        log.Errorf("Websocketproxy: couldn't dial to remote backend url %s, %s, %+v", backendURL.String(), err, resp)
        http.Error(rw, "Remote backend unreachable", http.StatusBadGateway)
        return
    }
    defer connBackend.Close()

    upgrader := w.Upgrader
    if w.Upgrader == nil {
        upgrader = DefaultUpgrader
    }

    // Only pass those headers to the upgrader.
    upgradeHeader := http.Header{}
    upgradeHeader.Set("Sec-WebSocket-Protocol", resp.Header.Get(http.CanonicalHeaderKey("Sec-WebSocket-Protocol")))
    upgradeHeader.Set("Set-Cookie", resp.Header.Get(http.CanonicalHeaderKey("Set-Cookie")))

    // Now upgrade the existing incoming request to a WebSocket connection.
    // Also pass the header that we gathered from the Dial handshake.
    connPub, err := upgrader.Upgrade(rw, req, upgradeHeader)
    if err != nil {
        // The upgrader has already written an error response, so just log and return.
        log.Errorf("Websocketproxy: couldn't upgrade %s", err)
        return
    }
    defer connPub.Close()

    errc := make(chan error, 2)
    cp := func(dst io.Writer, src io.Reader) {
        _, err := io.Copy(dst, src)
        errc <- err
    }

    // Start our proxy now, everything is ready...
    go cp(connBackend.UnderlyingConn(), connPub.UnderlyingConn())
    go cp(connPub.UnderlyingConn(), connBackend.UnderlyingConn())
    <-errc
}

// Removes the headers with the given names from the headers map
func RemoveHeaders(names []string, headers http.Header) {
    for _, h := range names {
        headers.Del(h)
    }
}

func removeSingleHopHeaders(hdrs *http.Header) {
    for _, h := range singleHopHeaders {
        hdrs.Del(h)
    }
}

func setGzipHeaders(hdr http.Header) {
    // The content-type will be explicitly set somewhere down the path of handlers
    hdr.Set("Content-Encoding", "gzip")
    hdr.Del("Content-Length")
}
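Deleting Content-Length here matters because the compressed body length is not known up front, so the response has to fall back to chunked transfer encoding. A minimal sketch of how setGzipHeaders might be paired with an on-the-fly gzip writer follows; gzipHandler and gzipResponseWriter are hypothetical names introduced only for this illustration.

// Illustrative sketch (not from the original source): wrap a handler so the
// response body is gzipped on the fly. setGzipHeaders removes any stale
// Content-Length before the compressed body is written.
func gzipHandler(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        if !strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
            next.ServeHTTP(w, r)
            return
        }
        setGzipHeaders(w.Header())
        gz := gzip.NewWriter(w)
        defer gz.Close()
        next.ServeHTTP(&gzipResponseWriter{ResponseWriter: w, Writer: gz}, r)
    })
}

// gzipResponseWriter is a hypothetical helper that routes Write calls through
// the gzip.Writer while keeping the original Header and WriteHeader behavior.
type gzipResponseWriter struct {
    http.ResponseWriter
    Writer *gzip.Writer
}

func (g *gzipResponseWriter) Write(b []byte) (int, error) {
    return g.Writer.Write(b)
}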