// Handle writes the error from the context into the HttpResponseWriter with a // 500 http.StatusInternalServerError status code. func (h *DefaultErrorHandler) Handle(ctx context.Context) (stop bool, err error) { var handlerError HandlerError = ctx.Data().Get(DataKeyForError).Data().(HandlerError) hostname, _ := os.Hostname() w := ctx.HttpResponseWriter() // write the error out w.Header().Set("Content-Type", "text/html") w.WriteHeader(http.StatusInternalServerError) w.Write([]byte("<!DOCTYPE html><html><head>")) w.Write([]byte("<style>")) w.Write([]byte("h1 { font-size: 17px }")) w.Write([]byte("h1 strong {text-decoration:underline}")) w.Write([]byte("h2 { background-color: #ffd; padding: 20px }")) w.Write([]byte("footer { margin-top: 20px; border-top:1px solid black; padding:10px; font-size:0.9em }")) w.Write([]byte("</style>")) w.Write([]byte("</head><body>")) w.Write([]byte(fmt.Sprintf("<h1>Error in <code>%s</code></h1><h2>%s</h2>", handlerError.Handler, handlerError))) w.Write([]byte(fmt.Sprintf("<h3><code>%s</code> error in Handler <code>%v</code></h3> <code><pre>%s</pre></code>", reflect.TypeOf(handlerError.OriginalError), &handlerError.Handler, handlerError.Handler))) w.Write([]byte(fmt.Sprintf("on %s", hostname))) w.Write([]byte("<footer>Learn more about <a href='http://github.com/stretchr/goweb' target='_blank'>Goweb</a></footer>")) w.Write([]byte("</body></html>")) // responses are actually ignored return false, nil }
// WriteResponseObject writes the status code and response object to the HttpResponseWriter in // the specified context, in the format best suited based on the request. // // Goweb uses the WebCodecService to decide which codec to use when responding // see http://godoc.org/github.com/stretchr/codecs/services#WebCodecService for more information. // // This method should be used when the Goweb Standard Response Object does not satisfy the needs of // the API, but other Respond* methods are recommended. func (a *GowebAPIResponder) WriteResponseObject(ctx context.Context, status int, responseObject interface{}) error { service := a.GetCodecService() acceptHeader := ctx.HttpRequest().Header.Get("Accept") extension := ctx.FileExtension() hasCallback := len(ctx.QueryValue(CallbackParameter)) > 0 codec, codecError := service.GetCodecForResponding(acceptHeader, extension, hasCallback) if codecError != nil { return codecError } options := ctx.CodecOptions() // do we need to add some options? if _, exists := options[constants.OptionKeyClientCallback]; hasCallback && !exists { options[constants.OptionKeyClientCallback] = ctx.QueryValue(CallbackParameter) } output, marshalErr := service.MarshalWithCodec(codec, responseObject, options) if marshalErr != nil { return marshalErr } // use the HTTP responder to respond ctx.HttpResponseWriter().Header().Set("Content-Type", codec.ContentType()) // TODO: test me a.httpResponder.With(ctx, status, output) return nil }
// With writes a response to the request in the specified context. func (r *GowebHTTPResponder) With(ctx context.Context, httpStatus int, body []byte) error { r.WithStatus(ctx, httpStatus) _, writeErr := ctx.HttpResponseWriter().Write(body) return writeErr }
// WithStatus writes the specified HTTP Status Code to the Context's ResponseWriter. // // If the Always200ParamName parameter is present, it will ignore the httpStatus argument, // and always write net/http.StatusOK (200). func (r *GowebHTTPResponder) WithStatus(ctx context.Context, httpStatus int) error { // check for always200 if len(ctx.FormValue(Always200ParamName)) > 0 { // always return OK httpStatus = http.StatusOK } ctx.HttpResponseWriter().WriteHeader(httpStatus) return nil }
// WithStatusText writes the specified HTTP Status Code to the Context's ResponseWriter and // includes a body with the default status text. func (r *GowebHTTPResponder) WithStatusText(ctx context.Context, httpStatus int) error { writeStatusErr := r.WithStatus(ctx, httpStatus) if writeStatusErr != nil { return writeStatusErr } // write the body header _, writeErr := ctx.HttpResponseWriter().Write([]byte(http.StatusText(httpStatus))) return writeErr }
func streamDownload(ctx context.Context, n *node.Node, filename string) { nf, err := n.FileReader() defer nf.Close() if err != nil { // File not found or some sort of file read error. // Probably deserves more checking err_msg := "err:@preAuth node.FileReader: " + err.Error() logger.Error(err_msg) responder.RespondWithError(ctx, 500, err_msg) return } s := &request.Streamer{R: []file.SectionReader{nf}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Size: n.File.Size, Filter: nil} err = s.Stream() if err != nil { // causes "multiple response.WriteHeader calls" error but better than no response err_msg := "err:@preAuth: s.stream: " + err.Error() logger.Error(err_msg) responder.RespondWithError(ctx, 500, err_msg) } return }
// handle download and its options func streamDownload(ctx context.Context, n *node.Node, options map[string]string) { nf, err := n.FileReader() defer nf.Close() if err != nil { // File not found or some sort of file read error. // Probably deserves more checking err_msg := "err:@preAuth node.FileReader: " + err.Error() logger.Error(err_msg) responder.RespondWithError(ctx, 500, err_msg) return } // set defaults filename := n.Id var filterFunc filter.FilterFunc = nil var compressionFormat string = "" // use options if exist if fn, has := options["filename"]; has { filename = fn } if fl, has := options["filter"]; has { if filter.Has(fl) { filterFunc = filter.Filter(fl) } } if cp, has := options["compression"]; has { if archive.IsValidCompress(cp) { compressionFormat = cp } } // stream it s := &request.Streamer{R: []file.SectionReader{nf}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Size: n.File.Size, Filter: filterFunc, Compression: compressionFormat} err = s.Stream(false) if err != nil { // causes "multiple response.WriteHeader calls" error but better than no response err_msg := "err:@preAuth: s.stream: " + err.Error() logger.Error(err_msg) responder.RespondWithError(ctx, 500, err_msg) } return }
// WithPermanentRedirect responds with a redirection to the specific path or URL with the // http.StatusMovedPermanently status. func (r *GowebHTTPResponder) WithPermanentRedirect(ctx context.Context, pathOrURLSegments ...interface{}) error { ctx.HttpResponseWriter().Header().Set("Location", paths.PathFromSegments(pathOrURLSegments...)) return r.WithStatus(ctx, http.StatusMovedPermanently) }
// GET: /node/{id}
//
// Read serves a single node. Depending on query parameters it either:
//   - streams the node's file (?download / ?download_raw, with optional
//     seek/length, index/part, filter, and filename parameters),
//   - issues a pre-authorized download URL (?download_url),
//   - POSTs the node to another Shock server (?download_post), or
//   - responds with the node document as JSON (no flags — the base case).
func (cr *NodeController) Read(id string, ctx context.Context) error {
	// Authenticate the request; a missing-auth error is tolerated here so
	// anonymous reads can be considered below.
	u, err := request.Authenticate(ctx.HttpRequest())
	if err != nil && err.Error() != e.NoAuth {
		return request.AuthError(err, ctx)
	}

	// Fake public user
	if u == nil {
		if conf.Bool(conf.Conf["anon-read"]) {
			u = &user.User{Uuid: ""}
		} else {
			return responder.RespondWithError(ctx, http.StatusUnauthorized, e.NoAuth)
		}
	}

	// Load node and handle user unauthorized
	n, err := node.Load(id, u.Uuid)
	if err != nil {
		if err.Error() == e.UnAuth {
			return responder.RespondWithError(ctx, http.StatusUnauthorized, e.UnAuth)
		} else if err.Error() == e.MongoDocNotFound {
			return responder.RespondWithError(ctx, http.StatusNotFound, "Node not found")
		} else {
			// In theory the db connection could be lost between
			// checking user and load but seems unlikely.
			logger.Error("Err@node_Read:LoadNode:" + id + ":" + err.Error())
			n, err = node.LoadFromDisk(id)
			// NOTE(review): if LoadFromDisk succeeds, err is nil and the
			// err.Error() call below panics — the err != nil check should
			// come first. Left as-is; fix separately.
			if err.Error() == "Node does not exist" {
				logger.Error(err.Error())
				return responder.RespondWithError(ctx, http.StatusBadRequest, err.Error())
			} else if err != nil {
				err_msg := "Err@node_Read:LoadNodeFromDisk:" + id + ":" + err.Error()
				logger.Error(err_msg)
				return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
			}
		}
	}

	// Gather query params
	query := ctx.HttpRequest().URL.Query()

	// Optional content filter; unknown filter names are silently ignored.
	var fFunc filter.FilterFunc = nil
	if _, ok := query["filter"]; ok {
		if filter.Has(query.Get("filter")) {
			fFunc = filter.Filter(query.Get("filter"))
		}
	}

	// Switch though param flags
	// ?download=1 or ?download_raw=1
	_, download_raw := query["download_raw"]
	if _, ok := query["download"]; ok || download_raw {
		if !n.HasFile() {
			return responder.RespondWithError(ctx, http.StatusBadRequest, "Node has no file")
		}
		// Download name defaults to the node id unless ?filename= is given.
		filename := n.Id
		if _, ok := query["filename"]; ok {
			filename = query.Get("filename")
		}

		// Byte-range download: ?seek= and/or ?length=.
		_, seek_ok := query["seek"]
		if _, length_ok := query["length"]; seek_ok || length_ok {
			if n.Type == "subset" {
				return responder.RespondWithError(ctx, http.StatusBadRequest, "subset nodes do not currently support seek/length offset retrieval")
			}
			// Fill in whichever of seek/length is missing: seek defaults
			// to 0, length defaults to the remainder of the file.
			var seek int64
			var length int64
			if !seek_ok {
				seek = 0
				length_str := query.Get("length")
				length, err = strconv.ParseInt(length_str, 10, 0)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "length must be an integer value")
				}
			} else if !length_ok {
				seek_str := query.Get("seek")
				seek, err = strconv.ParseInt(seek_str, 10, 0)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "seek must be an integer value")
				}
				length = n.File.Size - seek
			} else {
				seek_str := query.Get("seek")
				seek, err = strconv.ParseInt(seek_str, 10, 0)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "seek must be an integer value")
				}
				length_str := query.Get("length")
				length, err = strconv.ParseInt(length_str, 10, 0)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "length must be an integer value")
				}
			}
			r, err := n.FileReader()
			// NOTE(review): Close is deferred before the err check — this
			// panics if FileReader failed and r is nil. Same pattern recurs
			// below; fix separately.
			defer r.Close()
			if err != nil {
				err_msg := "Err@node_Read:Open: " + err.Error()
				logger.Error(err_msg)
				return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
			}
			// Stream exactly the requested section of the file.
			s := &request.Streamer{R: []file.SectionReader{}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Size: length, Filter: fFunc}
			s.R = append(s.R, io.NewSectionReader(r, seek, length))
			if download_raw {
				err = s.StreamRaw()
				if err != nil {
					// causes "multiple response.WriteHeader calls" error but better than no response
					err_msg := "err:@node_Read s.StreamRaw: " + err.Error()
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
				}
			} else {
				err = s.Stream()
				if err != nil {
					// causes "multiple response.WriteHeader calls" error but better than no response
					err_msg := "err:@node_Read s.Stream: " + err.Error()
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
				}
			}
		} else if _, ok := query["index"]; ok {
			// Indexed download: ?index=<name>[&part=...].
			//handling bam file
			if query.Get("index") == "bai" {
				if n.Type == "subset" {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "subset nodes do not support bam indices")
				}
				s := &request.Streamer{R: []file.SectionReader{}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Size: n.File.Size, Filter: fFunc}
				var region string
				if _, ok := query["region"]; ok {
					//retrieve alingments overlapped with specified region
					region = query.Get("region")
				}
				argv, err := request.ParseSamtoolsArgs(ctx)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "Invaid args in query url")
				}
				err = s.StreamSamtools(n.FilePath(), region, argv...)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "error while invoking samtools")
				}
				return nil
			}

			// open file
			r, err := n.FileReader()
			// NOTE(review): same defer-before-err-check pattern as above.
			defer r.Close()
			if err != nil {
				err_msg := "Err@node_Read:Open: " + err.Error()
				logger.Error(err_msg)
				return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
			}
			// load index obj and info
			idxName := query.Get("index")
			idxInfo, ok := n.Indexes[idxName]
			if !ok {
				return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index")
			}
			idx, err := n.DynamicIndex(idxName)
			if err != nil {
				return responder.RespondWithError(ctx, http.StatusBadRequest, err.Error())
			}

			// Virtual indices are computed on the fly from a chunk size
			// (?chunk_size= overrides the configured default).
			if idx.Type() == "virtual" {
				if n.Type == "subset" {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "subset nodes do not currently support virtual indices")
				}
				csize := conf.CHUNK_SIZE
				if _, ok := query["chunk_size"]; ok {
					csize, err = strconv.ParseInt(query.Get("chunk_size"), 10, 64)
					if err != nil {
						return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid chunk_size")
					}
				}
				idx.Set(map[string]interface{}{"ChunkSize": csize})
			}
			// size accumulates the total bytes of all selected sections;
			// it is assigned to s.Size once all readers are collected.
			var size int64 = 0
			s := &request.Streamer{R: []file.SectionReader{}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Filter: fFunc}
			_, hasPart := query["part"]
			if n.Type == "subset" && idxName == "chunkrecord" {
				// chunkrecord parts on a subset node must be translated
				// through the node's "record" index to real file offsets.
				recordIdxName := "record"
				recordIdxInfo, ok := n.Indexes[recordIdxName]
				if !ok {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid request, record index must exist to retrieve chunkrecord index on a subset node.")
				}
				recordIdx, err := n.DynamicIndex(recordIdxName)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusBadRequest, err.Error())
				}
				if !hasPart {
					// download full subset file
					fullRange := "1-" + strconv.FormatInt(recordIdxInfo.TotalUnits, 10)
					recSlice, err := recordIdx.Range(fullRange, n.IndexPath()+"/"+recordIdxName+".idx", recordIdxInfo.TotalUnits)
					if err != nil {
						return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index subset")
					}
					for _, rec := range recSlice {
						size += rec[1]
						s.R = append(s.R, io.NewSectionReader(r, rec[0], rec[1]))
					}
				} else if hasPart {
					// download parts
					for _, p := range query["part"] {
						chunkRecSlice, err := idx.Range(p, n.IndexPath()+"/"+idxName+".idx", idxInfo.TotalUnits)
						if err != nil {
							return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index part")
						}
						// This gets us the parts of the chunkrecord index, but we still need to convert these to record indices.
						for _, chunkRec := range chunkRecSlice {
							// presumably 16 is the number of records per chunk
							// here — TODO confirm against the index format.
							start := (chunkRec[0] / 16) + 1
							stop := (start - 1) + (chunkRec[1] / 16)
							recSlice, err := recordIdx.Range(strconv.FormatInt(start, 10)+"-"+strconv.FormatInt(stop, 10), n.IndexPath()+"/"+recordIdxName+".idx", recordIdxInfo.TotalUnits)
							if err != nil {
								return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index subset")
							}
							for _, rec := range recSlice {
								size += rec[1]
								s.R = append(s.R, io.NewSectionReader(r, rec[0], rec[1]))
							}
						}
					}
				} else {
					// bad request
					// NOTE(review): unreachable — !hasPart and hasPart cover
					// all cases above.
					return responder.RespondWithError(ctx, http.StatusBadRequest, "Index parameter requires part parameter")
				}
			} else {
				if (!hasPart) && (idxInfo.Type == "subset") {
					// download full subset file
					fullRange := "1-" + strconv.FormatInt(idxInfo.TotalUnits, 10)
					recSlice, err := idx.Range(fullRange, n.IndexPath()+"/"+idxName+".idx", idxInfo.TotalUnits)
					if err != nil {
						return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index subset")
					}
					for _, rec := range recSlice {
						size += rec[1]
						s.R = append(s.R, io.NewSectionReader(r, rec[0], rec[1]))
					}
				} else if hasPart {
					// download parts
					for _, p := range query["part"] {
						// special case for subset ranges
						if idxInfo.Type == "subset" {
							recSlice, err := idx.Range(p, n.IndexPath()+"/"+idxName+".idx", idxInfo.TotalUnits)
							if err != nil {
								return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index part")
							}
							for _, rec := range recSlice {
								size += rec[1]
								s.R = append(s.R, io.NewSectionReader(r, rec[0], rec[1]))
							}
						} else {
							pos, length, err := idx.Part(p, n.IndexPath()+"/"+idxName+".idx", idxInfo.TotalUnits)
							if err != nil {
								return responder.RespondWithError(ctx, http.StatusBadRequest, "Invalid index part")
							}
							size += length
							s.R = append(s.R, io.NewSectionReader(r, pos, length))
						}
					}
				} else {
					// bad request
					return responder.RespondWithError(ctx, http.StatusBadRequest, "Index parameter requires part parameter")
				}
			}
			s.Size = size
			if download_raw {
				err = s.StreamRaw()
				if err != nil {
					// causes "multiple response.WriteHeader calls" error but better than no response
					err_msg := "err:@node_Read s.StreamRaw: " + err.Error()
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
				}
			} else {
				err = s.Stream()
				if err != nil {
					// causes "multiple response.WriteHeader calls" error but better than no response
					err_msg := "err:@node_Read s.Stream: " + err.Error()
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
				}
			}
			// download full file
		} else {
			if n.Type == "subset" {
				// Subset nodes materialize their content from the parent
				// file via the stored subset index.
				// open file
				r, err := n.FileReader()
				// NOTE(review): same defer-before-err-check pattern as above.
				defer r.Close()
				if err != nil {
					err_msg := "Err@node_Read:Open: " + err.Error()
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
				}
				idx := index.New()
				s := &request.Streamer{R: []file.SectionReader{}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Size: n.File.Size, Filter: fFunc}
				fullRange := "1-" + strconv.FormatInt(n.Subset.Index.TotalUnits, 10)
				recSlice, err := idx.Range(fullRange, n.Path()+"/"+n.Id+".subset.idx", n.Subset.Index.TotalUnits)
				if err != nil {
					return responder.RespondWithError(ctx, http.StatusInternalServerError, "Invalid data index for subset node.")
				}
				for _, rec := range recSlice {
					s.R = append(s.R, io.NewSectionReader(r, rec[0], rec[1]))
				}
				if download_raw {
					err = s.StreamRaw()
					if err != nil {
						// causes "multiple response.WriteHeader calls" error but better than no response
						err_msg := "err:@node_Read s.StreamRaw: " + err.Error()
						logger.Error(err_msg)
						return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
					}
				} else {
					err = s.Stream()
					if err != nil {
						// causes "multiple response.WriteHeader calls" error but better than no response
						err_msg := "err:@node_Read s.Stream: " + err.Error()
						logger.Error(err_msg)
						return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
					}
				}
			} else {
				// Plain node: stream the whole file directly.
				nf, err := n.FileReader()
				// NOTE(review): same defer-before-err-check pattern as above.
				defer nf.Close()
				if err != nil {
					// File not found or some sort of file read error.
					// Probably deserves more checking
					err_msg := "err:@node_Read node.FileReader: " + err.Error()
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
				}
				s := &request.Streamer{R: []file.SectionReader{nf}, W: ctx.HttpResponseWriter(), ContentType: "application/octet-stream", Filename: filename, Size: n.File.Size, Filter: fFunc}
				if download_raw {
					err = s.StreamRaw()
					if err != nil {
						// causes "multiple response.WriteHeader calls" error but better than no response
						err_msg := "err:@node_Read s.StreamRaw: " + err.Error()
						logger.Error(err_msg)
						return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
					}
				} else {
					err = s.Stream()
					if err != nil {
						// causes "multiple response.WriteHeader calls" error but better than no response
						err_msg := "err:@node_Read s.Stream: " + err.Error()
						logger.Error(err_msg)
						return responder.RespondWithError(ctx, http.StatusBadRequest, err_msg)
					}
				}
			}
		}
	} else if _, ok := query["download_url"]; ok {
		// Issue a one-shot pre-authorized download URL instead of data.
		if n.Type == "subset" {
			return responder.RespondWithError(ctx, http.StatusBadRequest, "subset nodes do not currently support download_url operation")
		}
		if !n.HasFile() {
			return responder.RespondWithError(ctx, http.StatusBadRequest, "Node has no file")
		} else {
			options := map[string]string{}
			if _, ok := query["filename"]; ok {
				options["filename"] = query.Get("filename")
			}
			if p, err := preauth.New(util.RandString(20), "download", n.Id, options); err != nil {
				err_msg := "err:@node_Read download_url: " + err.Error()
				logger.Error(err_msg)
				return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
			} else {
				return responder.RespondWithData(ctx, util.UrlResponse{Url: util.ApiUrl(ctx) + "/preauth/" + p.Id, ValidTill: p.ValidTill.Format(time.ANSIC)})
			}
		}
	} else if _, ok := query["download_post"]; ok {
		// This is a request to post the node to another Shock server. The 'post_url' parameter is required.
		// By default the post operation will include the data file and attributes (these options can be set
		// with post_data=0/1 and post_attr=0/1).
		if n.Type == "subset" {
			return responder.RespondWithError(ctx, http.StatusBadRequest, "subset nodes do not currently support download_post operation")
		}
		post_url := ""
		if _, ok := query["post_url"]; ok {
			post_url = query.Get("post_url")
		} else {
			return responder.RespondWithError(ctx, http.StatusBadRequest, "Request type requires post_url parameter of where to post new Shock node")
		}
		// Both options default to on; "0"/"1" in the query override them.
		post_opts := map[string]int{
			"post_data": 1,
			"post_attr": 1,
		}
		for k, _ := range post_opts {
			if _, ok := query[k]; ok {
				if query.Get(k) == "0" {
					post_opts[k] = 0
				} else if query.Get(k) == "1" {
					post_opts[k] = 1
				} else {
					return responder.RespondWithError(ctx, http.StatusBadRequest, "Parameter "+k+" must be either 0 or 1")
				}
			}
		}
		// Build the multipart form with the file and/or JSON attributes.
		form := client.NewForm()
		form.AddParam("file_name", n.File.Name)
		if post_opts["post_data"] == 1 {
			form.AddFile("upload", n.FilePath())
		}
		if post_opts["post_attr"] == 1 && n.Attributes != nil {
			// marshal error deliberately ignored; nil attributes were
			// excluded by the guard above
			attr, _ := json.Marshal(n.Attributes)
			form.AddParam("attributes_str", string(attr[:]))
		}
		err = form.Create()
		if err != nil {
			err_msg := "could not create multipart form for posting to Shock server: " + err.Error()
			logger.Error(err_msg)
			return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
		}
		headers := client.Header{
			"Content-Type":   form.ContentType,
			"Content-Length": strconv.FormatInt(form.Length, 10),
		}
		// forward the caller's credentials to the remote Shock server
		if _, hasAuth := ctx.HttpRequest().Header["Authorization"]; hasAuth {
			headers["Authorization"] = ctx.HttpRequest().Header.Get("Authorization")
		}
		if res, err := client.Do("POST", post_url, headers, form.Reader); err == nil {
			if res.StatusCode == 200 {
				// relay the remote server's parsed response to our caller
				r := responseWrapper{}
				body, _ := ioutil.ReadAll(res.Body)
				if err = json.Unmarshal(body, &r); err != nil {
					err_msg := "err:@node_Read POST: " + err.Error()
					logger.Error(err_msg)
					return responder.WriteResponseObject(ctx, http.StatusInternalServerError, err_msg)
				} else {
					return responder.WriteResponseObject(ctx, http.StatusOK, r)
				}
			} else {
				// non-200 from the remote server: try to surface its own
				// error message, otherwise report the bare status
				r := responseWrapper{}
				body, _ := ioutil.ReadAll(res.Body)
				if err = json.Unmarshal(body, &r); err == nil {
					err_msg := res.Status + ": " + (*r.Error)[0]
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
				} else {
					err_msg := "request error: " + res.Status
					logger.Error(err_msg)
					return responder.RespondWithError(ctx, http.StatusInternalServerError, err_msg)
				}
			}
		} else {
			return err
		}
	} else {
		// Base case respond with node in json
		return responder.RespondWithData(ctx, n)
	}
	return nil
}
func addResponseHeaders(ctx context.Context) { ctx.HttpResponseWriter().Header().Set("Connection", "close") ctx.HttpResponseWriter().Header().Set("Access-Control-Allow-Headers", "Authorization") ctx.HttpResponseWriter().Header().Set("Access-Control-Allow-Methods", "POST, GET, PUT, DELETE, OPTIONS") ctx.HttpResponseWriter().Header().Set("Access-Control-Allow-Origin", "*") }