func handleVerify(conn http.ResponseWriter, req *http.Request) {
	if !(req.Method == "POST" && req.URL.Path == "/camli/sig/verify") {
		httputil.BadRequestError(conn, "Misconfigured handler.")
		return
	}
	req.ParseForm()
	sjson := req.FormValue("sjson")
	if sjson == "" {
		httputil.BadRequestError(conn, "Missing sjson parameter.")
		return
	}

	m := make(map[string]interface{})

	vreq := jsonsign.NewVerificationRequest(sjson, pubKeyFetcher)
	if vreq.Verify() {
		m["signatureValid"] = 1
		m["verifiedData"] = vreq.PayloadMap
	} else {
		errStr := vreq.Err.String()
		m["signatureValid"] = 0
		m["errorMessage"] = errStr
	}

	// Always reply 200 OK; success or failure is conveyed in the JSON body,
	// not in the HTTP status code.
	conn.WriteHeader(http.StatusOK)
	httputil.ReturnJson(conn, m)
}
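For reference, here is a minimal client-side sketch of calling this verify endpoint. It is illustrative only, written against the current Go standard library rather than the older release these handlers target; serverURL is a placeholder, while the /camli/sig/verify path and the sjson parameter come from the handler above.

// Illustrative sketch (not part of the original source): POST signed JSON to
// /camli/sig/verify and decode the JSON reply.
package camliclient

import (
	"encoding/json"
	"net/http"
	"net/url"
)

func verifySignedJSON(serverURL, signedJSON string) (map[string]interface{}, error) {
	resp, err := http.PostForm(serverURL+"/camli/sig/verify",
		url.Values{"sjson": {signedJSON}})
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// The handler always replies 200 OK; check "signatureValid" and
	// "errorMessage" in the JSON body rather than the status code.
	var m map[string]interface{}
	err = json.NewDecoder(resp.Body).Decode(&m)
	return m, err
}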
func handleTestForm(conn http.ResponseWriter, req *http.Request) {
	if !(req.Method == "POST" && req.URL.Path == "/camli/testform") {
		httputil.BadRequestError(conn, "Misconfigured handler.")
		return
	}
	multipart, err := req.MultipartReader()
	if multipart == nil {
		httputil.BadRequestError(conn, fmt.Sprintf("Expected multipart/form-data POST request; %v", err))
		return
	}
	for {
		part, err := multipart.NextPart()
		if err != nil {
			fmt.Println("Error reading:", err)
			break
		}
		if part == nil {
			break
		}
		formName := part.FormName()
		fmt.Printf("New value [%s], part=%v\n", formName, part)

		sha1 := sha1.New()
		io.Copy(sha1, part)
		fmt.Printf("Got part digest: %x\n", sha1.Sum())
	}

	fmt.Println("Done reading multipart body.")
}
func handleRemove(conn http.ResponseWriter, req *http.Request, storage blobserver.Storage) {
	if w, ok := storage.(blobserver.ContextWrapper); ok {
		storage = w.WrapContext(req)
	}
	if req.Method != "POST" {
		// A non-POST here means the handler was wired up wrong, not a client error.
		log.Fatalf("Invalid method; handlers misconfigured")
	}

	configer, ok := storage.(blobserver.Configer)
	if !ok {
		conn.WriteHeader(http.StatusForbidden)
		fmt.Fprintf(conn, "Remove handler's blobserver.Storage isn't a blobserver.Configer; can't remove")
		return
	}
	if !configer.Config().IsQueue {
		conn.WriteHeader(http.StatusForbidden)
		fmt.Fprintf(conn, "Can only remove blobs from a queue.\n")
		return
	}

	n := 0
	toRemove := make([]*blobref.BlobRef, 0)
	toRemoveStr := make([]string, 0)
	for {
		n++
		if n > maxRemovesPerRequest {
			httputil.BadRequestError(conn,
				fmt.Sprintf("Too many removes in this request; max is %d", maxRemovesPerRequest))
			return
		}
		key := fmt.Sprintf("blob%v", n)
		value := req.FormValue(key)
		if value == "" {
			break
		}
		ref := blobref.Parse(value)
		if ref == nil {
			httputil.BadRequestError(conn, "Bogus blobref for key "+key)
			return
		}
		toRemove = append(toRemove, ref)
		toRemoveStr = append(toRemoveStr, ref.String())
	}

	err := storage.RemoveBlobs(toRemove)
	if err != nil {
		conn.WriteHeader(http.StatusInternalServerError)
		log.Printf("Server error during remove: %v", err)
		fmt.Fprintf(conn, "Server error")
		return
	}

	reply := make(map[string]interface{}, 0)
	reply["removed"] = toRemoveStr
	httputil.ReturnJson(conn, reply)
}
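A client builds the form body this handler parses by numbering the blobrefs blob1, blob2, and so on. The sketch below is illustrative only; removeURL is a placeholder, since where the remove handler is mounted depends on server configuration, and removal is only permitted on queue-backed storage as the handler enforces.

// Illustrative sketch (not part of the original source): POST numbered
// blob1..blobN form parameters to the remove handler.
package camliclient

import (
	"fmt"
	"net/http"
	"net/url"
)

func removeBlobs(removeURL string, refs []string) (*http.Response, error) {
	vals := url.Values{}
	for i, ref := range refs {
		// Keys are 1-based: blob1, blob2, ...
		vals.Set(fmt.Sprintf("blob%d", i+1), ref)
	}
	return http.PostForm(removeURL, vals)
}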
func handleCamli(conn http.ResponseWriter, req *http.Request) {
	handler := func(conn http.ResponseWriter, req *http.Request) {
		httputil.BadRequestError(conn,
			fmt.Sprintf("Unsupported path (%s) or method (%s).", req.URL.Path, req.Method))
	}
	if *flagRequestLog {
		log.Printf("%s %s", req.Method, req.RawURL)
	}
	switch req.Method {
	case "GET":
		switch req.URL.Path {
		case "/camli/enumerate-blobs":
			handler = auth.RequireAuth(handleEnumerateBlobs)
		default:
			handler = createGetHandler(blobFetcher)
		}
	case "POST":
		switch req.URL.Path {
		case "/camli/preupload":
			handler = auth.RequireAuth(handlePreUpload)
		case "/camli/upload":
			handler = auth.RequireAuth(handleMultiPartUpload)
		case "/camli/testform": // debug only
			handler = handleTestForm
		case "/camli/form": // debug only
			handler = handleCamliForm
		}
	case "PUT": // no longer part of spec
		handler = auth.RequireAuth(handlePut)
	}
	handler(conn, req)
}
func handlePut(conn http.ResponseWriter, req *http.Request) {
	blobRef := BlobFromUrlPath(req.URL.Path)
	if blobRef == nil {
		httputil.BadRequestError(conn, "Malformed PUT URL.")
		return
	}

	if !blobRef.IsSupported() {
		httputil.BadRequestError(conn, "unsupported object hash function")
		return
	}

	_, err := receiveBlob(blobRef, req.Body)
	if err != nil {
		httputil.ServerError(conn, err)
		return
	}

	fmt.Fprint(conn, "OK")
}
func handlePut(conn http.ResponseWriter, req *http.Request, blobReceiver blobserver.BlobReceiver) {
	if w, ok := blobReceiver.(blobserver.ContextWrapper); ok {
		blobReceiver = w.WrapContext(req)
	}

	blobRef := blobref.FromPattern(kPutPattern, req.URL.Path)
	if blobRef == nil {
		httputil.BadRequestError(conn, "Malformed PUT URL.")
		return
	}

	if !blobRef.IsSupported() {
		httputil.BadRequestError(conn, "unsupported object hash function")
		return
	}

	_, err := blobReceiver.ReceiveBlob(blobRef, req.Body)
	if err != nil {
		httputil.ServerError(conn, err)
		return
	}
	fmt.Fprint(conn, "OK")
}
func handleCamliSig(conn http.ResponseWriter, req *http.Request) {
	handler := func(conn http.ResponseWriter, req *http.Request) {
		httputil.BadRequestError(conn, "Unsupported path or method.")
	}
	switch req.Method {
	case "POST":
		switch req.URL.Path {
		case "/camli/sig/sign":
			handler = auth.RequireAuth(handleSign)
		case "/camli/sig/verify":
			handler = handleVerify
		}
	}
	handler(conn, req)
}
func handleStat(conn http.ResponseWriter, req *http.Request, storage blobserver.BlobStatter) {
	if w, ok := storage.(blobserver.ContextWrapper); ok {
		storage = w.WrapContext(req)
	}

	toStat := make([]*blobref.BlobRef, 0)
	switch req.Method {
	case "POST":
		fallthrough
	case "GET":
		camliVersion := req.FormValue("camliversion")
		if camliVersion == "" {
			httputil.BadRequestError(conn, "No camliversion")
			return
		}
		n := 0
		for {
			n++
			key := fmt.Sprintf("blob%v", n)
			value := req.FormValue(key)
			if value == "" {
				n--
				break
			}
			if n > maxStatBlobs {
				httputil.BadRequestError(conn, "Too many stat blob checks")
				return
			}
			ref := blobref.Parse(value)
			if ref == nil {
				httputil.BadRequestError(conn, "Bogus blobref for key "+key)
				return
			}
			toStat = append(toStat, ref)
		}
	default:
		httputil.BadRequestError(conn, "Invalid method.")
		return
	}

	waitSeconds := 0
	if waitStr := req.FormValue("maxwaitsec"); waitStr != "" {
		waitSeconds, _ = strconv.Atoi(waitStr)
		switch {
		case waitSeconds < 0:
			waitSeconds = 0
		case waitSeconds > 30:
			// TODO: don't hard-code 30.  Push this up into a blobserver interface
			// for getting the configuration of the server (ultimately a flag in
			// the binary).
			waitSeconds = 30
		}
	}

	statRes := make([]map[string]interface{}, 0)
	if len(toStat) > 0 {
		blobch := make(chan blobref.SizedBlobRef)
		resultch := make(chan os.Error, 1)
		go func() {
			err := storage.StatBlobs(blobch, toStat, waitSeconds)
			close(blobch)
			resultch <- err
		}()

		for sb := range blobch {
			ah := make(map[string]interface{})
			ah["blobRef"] = sb.BlobRef.String()
			ah["size"] = sb.Size
			statRes = append(statRes, ah)
		}
		err := <-resultch
		if err != nil {
			log.Printf("Stat error: %v", err)
			conn.WriteHeader(http.StatusInternalServerError)
			return
		}
	}

	configer, _ := storage.(blobserver.Configer)
	ret := commonUploadResponse(configer, req)
	ret["stat"] = statRes
	ret["canLongPoll"] = true
	httputil.ReturnJson(conn, ret)
}
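The matching client request is a form with camliversion, numbered blob1..blobN parameters, and an optional maxwaitsec for long-polling. The sketch below is illustrative only; statURL is a placeholder for wherever the stat handler is mounted, and it uses the current Go standard library.

// Illustrative sketch (not part of the original source): stat a batch of
// blobrefs, optionally long-polling via maxwaitsec.
package camliclient

import (
	"fmt"
	"net/http"
	"net/url"
)

func statBlobs(statURL string, refs []string, maxWaitSec int) (*http.Response, error) {
	vals := url.Values{"camliversion": {"1"}}
	for i, ref := range refs {
		vals.Set(fmt.Sprintf("blob%d", i+1), ref)
	}
	if maxWaitSec > 0 {
		// The handler clamps this to a server-side maximum (30s in the code above).
		vals.Set("maxwaitsec", fmt.Sprint(maxWaitSec))
	}
	return http.PostForm(statURL, vals)
}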
func unsupportedHandler(conn http.ResponseWriter, req *http.Request) {
	httputil.BadRequestError(conn, "Unsupported camlistore path or method.")
}
func handleMultiPartUpload(conn http.ResponseWriter, req *http.Request, blobReceiver blobserver.BlobReceiveConfiger) {
	if w, ok := blobReceiver.(blobserver.ContextWrapper); ok {
		blobReceiver = w.WrapContext(req).(blobserver.BlobReceiveConfiger)
	}
	if !(req.Method == "POST" && strings.Contains(req.URL.Path, "/camli/upload")) {
		log.Printf("Misconfigured upload handler")
		httputil.BadRequestError(conn, "Misconfigured handler.")
		return
	}

	receivedBlobs := make([]blobref.SizedBlobRef, 0, 10)

	multipart, err := req.MultipartReader()
	if multipart == nil {
		httputil.BadRequestError(conn, fmt.Sprintf(
			"Expected multipart/form-data POST request; %v", err))
		return
	}

	var errText string
	addError := func(s string) {
		log.Printf("Client error: %s", s)
		if errText == "" {
			errText = s
			return
		}
		errText = errText + "\n" + s
	}

	for {
		mimePart, err := multipart.NextPart()
		if err == os.EOF {
			break
		}
		if err != nil {
			addError(fmt.Sprintf("Error reading multipart section: %v", err))
			break
		}

		// Post-r60 releases: ParseMediaType also returns an error.
		//contentDisposition, params, err := mime.ParseMediaType(mimePart.Header.Get("Content-Disposition"))
		//if err != nil {
		//	addError(err.String())
		//	break
		//}

		// r60:
		contentDisposition, params := mime.ParseMediaType(mimePart.Header.Get("Content-Disposition"))
		if contentDisposition == "" {
			addError("invalid Content-Disposition")
			break
		}

		if contentDisposition != "form-data" {
			addError(fmt.Sprintf("Expected Content-Disposition of \"form-data\"; got %q", contentDisposition))
			break
		}

		formName := params["name"]
		ref := blobref.Parse(formName)
		if ref == nil {
			addError(fmt.Sprintf("Ignoring form key %q", formName))
			continue
		}

		if oldAppEngineHappySpec {
			_, hasContentType := mimePart.Header["Content-Type"]
			if !hasContentType {
				addError(fmt.Sprintf("Expected Content-Type header for blobref %s; see spec", ref))
				continue
			}

			_, hasFileName := params["filename"]
			if !hasFileName {
				addError(fmt.Sprintf("Expected 'filename' Content-Disposition parameter for blobref %s; see spec", ref))
				continue
			}
		}

		blobGot, err := blobReceiver.ReceiveBlob(ref, mimePart)
		if err != nil {
			addError(fmt.Sprintf("Error receiving blob %v: %v\n", ref, err))
			break
		}
		log.Printf("Received blob %v\n", blobGot)
		receivedBlobs = append(receivedBlobs, blobGot)
	}

	log.Println("Done reading multipart body.")
	ret := commonUploadResponse(blobReceiver, req)

	received := make([]map[string]interface{}, 0)
	for _, got := range receivedBlobs {
		log.Printf("Got blob: %v\n", got)
		blob := make(map[string]interface{})
		blob["blobRef"] = got.BlobRef.String()
		blob["size"] = got.Size
		received = append(received, blob)
	}
	ret["received"] = received

	if errText != "" {
		ret["errorText"] = errText
	}

	httputil.ReturnJson(conn, ret)
}
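On the wire, each multipart part's form field name is the blobref of the bytes it carries; that is what the handler parses back out with blobref.Parse. The sketch below builds such a body with the current Go standard library. It is illustrative only; uploadURL is a placeholder, and blobs maps blobref string to blob contents.

// Illustrative sketch (not part of the original source): build a
// multipart/form-data upload where each field name is a blobref.
package camliclient

import (
	"bytes"
	"mime/multipart"
	"net/http"
)

func uploadBlobs(uploadURL string, blobs map[string][]byte) (*http.Response, error) {
	var body bytes.Buffer
	w := multipart.NewWriter(&body)
	for ref, contents := range blobs {
		// CreateFormFile sets both a filename and a Content-Type on the part,
		// which keeps stricter servers (see oldAppEngineHappySpec above) happy.
		part, err := w.CreateFormFile(ref, ref)
		if err != nil {
			return nil, err
		}
		if _, err := part.Write(contents); err != nil {
			return nil, err
		}
	}
	if err := w.Close(); err != nil {
		return nil, err
	}
	return http.Post(uploadURL, w.FormDataContentType(), &body)
}

In practice a client would call the preupload or stat endpoint first and only upload the blobs the server reports as missing.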
func handleMultiPartUpload(conn http.ResponseWriter, req *http.Request) {
	if !(req.Method == "POST" && req.URL.Path == "/camli/upload") {
		httputil.BadRequestError(conn, "Misconfigured handler.")
		return
	}

	receivedBlobs := make([]*receivedBlob, 0, 10)

	multipart, err := req.MultipartReader()
	if multipart == nil {
		httputil.BadRequestError(conn, fmt.Sprintf(
			"Expected multipart/form-data POST request; %v", err))
		return
	}

	var errText string
	addError := func(s string) {
		log.Printf("Client error: %s", s)
		if errText == "" {
			errText = s
			return
		}
		errText = errText + "\n" + s
	}

	for {
		part, err := multipart.NextPart()
		if err != nil {
			addError(fmt.Sprintf("Error reading multipart section: %v", err))
			break
		}
		if part == nil {
			break
		}

		contentDisposition, params := mime.ParseMediaType(part.Header["Content-Disposition"])
		if contentDisposition != "form-data" {
			addError(fmt.Sprintf("Expected Content-Disposition of \"form-data\"; got %q", contentDisposition))
			break
		}

		formName := params["name"]
		ref := blobref.Parse(formName)
		if ref == nil {
			addError(fmt.Sprintf("Ignoring form key %q", formName))
			continue
		}

		_, hasContentType := part.Header["Content-Type"]
		if !hasContentType {
			addError(fmt.Sprintf("Expected Content-Type header for blobref %s; see spec", ref))
			continue
		}

		_, hasFileName := params["filename"]
		if !hasFileName {
			addError(fmt.Sprintf("Expected 'filename' Content-Disposition parameter for blobref %s; see spec", ref))
			continue
		}

		blobGot, err := receiveBlob(ref, part)
		if err != nil {
			addError(fmt.Sprintf("Error receiving blob %v: %v\n", ref, err))
			break
		}
		log.Printf("Received blob %v\n", blobGot)
		receivedBlobs = append(receivedBlobs, blobGot)
	}

	log.Println("Done reading multipart body.")
	ret := commonUploadResponse(req)

	received := make([]map[string]interface{}, 0)
	for _, got := range receivedBlobs {
		log.Printf("Got blob: %v\n", got)
		blob := make(map[string]interface{})
		blob["blobRef"] = got.blobRef.String()
		blob["size"] = got.size
		received = append(received, blob)
	}
	ret["received"] = received

	if errText != "" {
		ret["errorText"] = errText
	}

	httputil.ReturnJson(conn, ret)
}
// Unauthenticated user.  Be paranoid.
func handleGetViaSharing(conn http.ResponseWriter, req *http.Request,
	blobRef *blobref.BlobRef, fetcher blobref.StreamingFetcher) {
	if w, ok := fetcher.(blobserver.ContextWrapper); ok {
		fetcher = w.WrapContext(req)
	}

	viaPathOkay := false
	startTime := time.Nanoseconds()
	defer func() {
		if !viaPathOkay {
			// Insert a delay, to hide timing attacks probing
			// for the existence of blobs.
			sleep := fetchFailureDelayNs - (time.Nanoseconds() - startTime)
			if sleep > 0 {
				time.Sleep(sleep)
			}
		}
	}()

	viaBlobs := make([]*blobref.BlobRef, 0)
	if via := req.FormValue("via"); via != "" {
		for _, vs := range strings.Split(via, ",") {
			if br := blobref.Parse(vs); br == nil {
				httputil.BadRequestError(conn, "Malformed blobref in via param")
				return
			} else {
				viaBlobs = append(viaBlobs, br)
			}
		}
	}

	fetchChain := make([]*blobref.BlobRef, 0)
	fetchChain = append(fetchChain, viaBlobs...)
	fetchChain = append(fetchChain, blobRef)
	for i, br := range fetchChain {
		switch i {
		case 0:
			// The first blob in the chain must be a "share" schema blob
			// whose target is the next hop.
			file, size, err := fetcher.FetchStreaming(br)
			if err != nil {
				log.Printf("Fetch chain 0 of %s failed: %v", br.String(), err)
				auth.SendUnauthorized(conn)
				return
			}
			defer file.Close()
			if size > maxJsonSize {
				log.Printf("Fetch chain 0 of %s too large", br.String())
				auth.SendUnauthorized(conn)
				return
			}
			jd := json.NewDecoder(file)
			m := make(map[string]interface{})
			if err := jd.Decode(&m); err != nil {
				log.Printf("Fetch chain 0 of %s wasn't JSON: %v", br.String(), err)
				auth.SendUnauthorized(conn)
				return
			}
			if m["camliType"].(string) != "share" {
				log.Printf("Fetch chain 0 of %s wasn't a share", br.String())
				auth.SendUnauthorized(conn)
				return
			}
			if len(fetchChain) > 1 && fetchChain[1].String() != m["target"].(string) {
				log.Printf("Fetch chain 0->1 (%s -> %q) unauthorized, expected hop to %q",
					br.String(), fetchChain[1].String(), m["target"])
				auth.SendUnauthorized(conn)
				return
			}
		case len(fetchChain) - 1:
			// Last one is fine (as long as its path up to here has been proven, and it's
			// not the first thing in the chain)
			continue
		default:
			// Intermediate blobs must contain a reference to the next blob in the chain.
			file, _, err := fetcher.FetchStreaming(br)
			if err != nil {
				log.Printf("Fetch chain %d of %s failed: %v", i, br.String(), err)
				auth.SendUnauthorized(conn)
				return
			}
			defer file.Close()
			lr := io.LimitReader(file, maxJsonSize)
			slurpBytes, err := ioutil.ReadAll(lr)
			if err != nil {
				log.Printf("Fetch chain %d of %s failed in slurp: %v", i, br.String(), err)
				auth.SendUnauthorized(conn)
				return
			}
			sought := fetchChain[i+1].String()
			if bytes.Index(slurpBytes, []byte(sought)) == -1 {
				log.Printf("Fetch chain %d of %s failed; no reference to %s", i, br.String(), sought)
				auth.SendUnauthorized(conn)
				return
			}
		}
	}

	viaPathOkay = true

	serveBlobRef(conn, req, blobRef, fetcher)
}
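An unauthenticated client proves its path to a blob by listing the chain of intermediate blobrefs, root share blob first, in the comma-separated "via" query parameter. The sketch below is illustrative only and written against the current Go standard library; baseURL is a placeholder, and the "/camli/<blobref>" GET path is an assumption based on the routing shown elsewhere in this code.

// Illustrative sketch (not part of the original source): fetch a blob through
// a share chain by supplying the "via" parameter the handler above verifies.
package camliclient

import (
	"net/http"
	"net/url"
	"strings"
)

func fetchViaShare(baseURL, target string, via []string) (*http.Response, error) {
	u := baseURL + "/camli/" + target
	if len(via) > 0 {
		// via lists the chain from the share blob down to (but not including) target.
		u += "?via=" + url.QueryEscape(strings.Join(via, ","))
	}
	return http.Get(u)
}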
func handleGet(conn http.ResponseWriter, req *http.Request, fetcher blobref.Fetcher) {
	isOwner := auth.IsAuthorized(req)

	blobRef := BlobFromUrlPath(req.URL.Path)
	if blobRef == nil {
		httputil.BadRequestError(conn, "Malformed GET URL.")
		return
	}

	var viaBlobs []*blobref.BlobRef
	if !isOwner {
		viaPathOkay := false
		startTime := time.Nanoseconds()
		defer func() {
			if !viaPathOkay {
				// Insert a delay, to hide timing attacks probing
				// for the existence of blobs.
				sleep := fetchFailureDelayNs - (time.Nanoseconds() - startTime)
				if sleep > 0 {
					time.Sleep(sleep)
				}
			}
		}()
		viaBlobs = make([]*blobref.BlobRef, 0)
		if via := req.FormValue("via"); via != "" {
			for _, vs := range strings.Split(via, ",", -1) {
				if br := blobref.Parse(vs); br == nil {
					httputil.BadRequestError(conn, "Malformed blobref in via param")
					return
				} else {
					viaBlobs = append(viaBlobs, br)
				}
			}
		}

		fetchChain := make([]*blobref.BlobRef, 0)
		fetchChain = append(fetchChain, viaBlobs...)
		fetchChain = append(fetchChain, blobRef)
		for i, br := range fetchChain {
			switch i {
			case 0:
				file, size, err := fetcher.Fetch(br)
				if err != nil {
					log.Printf("Fetch chain 0 of %s failed: %v", br.String(), err)
					sendUnauthorized(conn)
					return
				}
				defer file.Close()
				if size > maxJsonSize {
					log.Printf("Fetch chain 0 of %s too large", br.String())
					sendUnauthorized(conn)
					return
				}
				jd := json.NewDecoder(file)
				m := make(map[string]interface{})
				if err := jd.Decode(&m); err != nil {
					log.Printf("Fetch chain 0 of %s wasn't JSON: %v", br.String(), err)
					sendUnauthorized(conn)
					return
				}
				if m["camliType"].(string) != "share" {
					log.Printf("Fetch chain 0 of %s wasn't a share", br.String())
					sendUnauthorized(conn)
					return
				}
				if len(fetchChain) > 1 && fetchChain[1].String() != m["target"].(string) {
					log.Printf("Fetch chain 0->1 (%s -> %q) unauthorized, expected hop to %q",
						br.String(), fetchChain[1].String(), m["target"])
					sendUnauthorized(conn)
					return
				}
			case len(fetchChain) - 1:
				// Last one is fine (as long as its path up to here has been proven, and it's
				// not the first thing in the chain)
				continue
			default:
				file, _, err := fetcher.Fetch(br)
				if err != nil {
					log.Printf("Fetch chain %d of %s failed: %v", i, br.String(), err)
					sendUnauthorized(conn)
					return
				}
				defer file.Close()
				lr := io.LimitReader(file, maxJsonSize)
				slurpBytes, err := ioutil.ReadAll(lr)
				if err != nil {
					log.Printf("Fetch chain %d of %s failed in slurp: %v", i, br.String(), err)
					sendUnauthorized(conn)
					return
				}
				sought := fetchChain[i+1].String()
				if bytes.Index(slurpBytes, []byte(sought)) == -1 {
					log.Printf("Fetch chain %d of %s failed; no reference to %s", i, br.String(), sought)
					sendUnauthorized(conn)
					return
				}
			}
		}
		viaPathOkay = true
	}

	file, size, err := fetcher.Fetch(blobRef)
	switch err {
	case nil:
		break
	case os.ENOENT:
		conn.WriteHeader(http.StatusNotFound)
		fmt.Fprintf(conn, "Object not found.")
		return
	default:
		httputil.ServerError(conn, err)
		return
	}
	defer file.Close()

	reqRange := getRequestedRange(req)
	if reqRange.SkipBytes != 0 {
		_, err = file.Seek(reqRange.SkipBytes, 0)
		if err != nil {
			httputil.ServerError(conn, err)
			return
		}
	}

	var input io.Reader = file
	if reqRange.LimitBytes != -1 {
		input = io.LimitReader(file, reqRange.LimitBytes)
	}

	remainBytes := size - reqRange.SkipBytes
	if reqRange.LimitBytes != -1 && reqRange.LimitBytes < remainBytes {
		remainBytes = reqRange.LimitBytes
	}

	// Assume this generic content type by default.  For better
	// demos we'll try to sniff and guess the "right" MIME type in
	// certain cases (no Range requests, etc.), but this isn't part
	// of the Camli spec at all.  We just do it to ease demos.
	contentType := "application/octet-stream"
	if reqRange.IsWholeFile() {
		const peekSize = 1024
		bufReader, _ := bufio.NewReaderSize(input, peekSize)
		header, _ := bufReader.Peek(peekSize)
		if len(header) >= 8 {
			switch {
			case isValidUtf8(string(header)):
				contentType = "text/plain; charset=utf-8"
			case bytes.HasPrefix(header, []byte{0xff, 0xd8, 0xff, 0xe2}):
				contentType = "image/jpeg"
			case bytes.HasPrefix(header, []byte{0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa}):
				contentType = "image/png"
			}
		}
		input = bufReader
	}
	conn.SetHeader("Content-Type", contentType)

	if !reqRange.IsWholeFile() {
		conn.SetHeader("Content-Range",
			fmt.Sprintf("bytes %d-%d/%d", reqRange.SkipBytes,
				reqRange.SkipBytes+remainBytes, size))
		conn.WriteHeader(http.StatusPartialContent)
	}
	bytesCopied, err := io.Copy(conn, input)

	// If there's an error at this point, it's too late to tell the client,
	// as they've already been receiving bytes.  But they should be smart enough
	// to notice that the digest doesn't match.  We close the (chunked) response
	// anyway, to further signal errors.
	killConnection := func() {
		closer, _, err := conn.Hijack()
		if err == nil {
			closer.Close()
		}
	}

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error sending file: %v, err=%v\n", blobRef, err)
		killConnection()
		return
	}
	if bytesCopied != remainBytes {
		fmt.Fprintf(os.Stderr, "Error sending file: %v, copied=%d, not %d\n", blobRef,
			bytesCopied, remainBytes)
		killConnection()
		return
	}
}
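Since the handler honors Range requests (answering with 206 Partial Content and a Content-Range header), a client can fetch part of a blob with a standard Range header. The sketch below is illustrative only; blobURL is a placeholder such as "http://localhost:3179/camli/sha1-...", and it uses the current Go standard library.

// Illustrative sketch (not part of the original source): request a byte range
// of a blob from the GET handler above.
package camliclient

import (
	"fmt"
	"net/http"
)

func fetchRange(blobURL string, first, last int64) (*http.Response, error) {
	req, err := http.NewRequest("GET", blobURL, nil)
	if err != nil {
		return nil, err
	}
	// Inclusive byte range, per the HTTP Range header syntax.
	req.Header.Set("Range", fmt.Sprintf("bytes=%d-%d", first, last))
	return http.DefaultClient.Do(req)
}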
func handlePreUpload(conn http.ResponseWriter, req *http.Request) {
	if !(req.Method == "POST" && req.URL.Path == "/camli/preupload") {
		httputil.BadRequestError(conn, "Misconfigured handler.")
		return
	}
	req.ParseForm()
	camliVersion := req.FormValue("camliversion")
	if camliVersion == "" {
		httputil.BadRequestError(conn, "No camliversion")
		return
	}

	n := 0
	haveVector := new(vector.Vector)
	haveChan := make(chan *map[string]interface{})
	for {
		key := fmt.Sprintf("blob%v", n+1)
		value := req.FormValue(key)
		if value == "" {
			break
		}
		ref := blobref.Parse(value)
		if ref == nil {
			httputil.BadRequestError(conn, "Bogus blobref for key "+key)
			return
		}
		if !ref.IsSupported() {
			httputil.BadRequestError(conn, "Unsupported or bogus blobref "+key)
			return
		}
		n++

		// Stat all the files in parallel, one goroutine per blob.
		go func() {
			fi, err := os.Stat(BlobFileName(ref))
			if err == nil && fi.IsRegular() {
				info := make(map[string]interface{})
				info["blobRef"] = ref.String()
				info["size"] = fi.Size
				haveChan <- &info
			} else {
				haveChan <- nil
			}
		}()
	}

	if n > 0 {
		for have := range haveChan {
			if have != nil {
				haveVector.Push(have)
			}
			n--
			if n == 0 {
				break
			}
		}
	}

	ret := commonUploadResponse(req)
	ret["alreadyHave"] = haveVector.Copy()
	httputil.ReturnJson(conn, ret)
}
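Before uploading, a client can ask which blobs the server already has using the same camliversion plus blob1..blobN form encoding. The sketch below is illustrative only; the /camli/preupload path and parameter names come from the handler above, serverURL is a placeholder, and the "alreadyHave" list in the JSON reply tells the client which uploads it can skip.

// Illustrative sketch (not part of the original source): POST a preupload
// check for a batch of blobrefs.
package camliclient

import (
	"fmt"
	"net/http"
	"net/url"
)

func preUpload(serverURL string, refs []string) (*http.Response, error) {
	vals := url.Values{"camliversion": {"1"}}
	for i, ref := range refs {
		vals.Set(fmt.Sprintf("blob%d", i+1), ref)
	}
	return http.PostForm(serverURL+"/camli/preupload", vals)
}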