func main() {
	keyPrice := getKeyPrice()
	fmt.Printf("Key price: %f\n", keyPrice)
	fileName := os.Args[1]
	fmt.Printf("Processing csv file: %s\n", fileName)
	fh, err := os.Open(fileName)
	if err != nil {
		fmt.Printf("Error: can't open file: %s, msg: %s\n", fileName, err)
		os.Exit(1)
	}
	csvReader := csv.NewReader(fh)
	records, err := csvReader.ReadAll()
	if err != nil {
		fmt.Printf("Error: can't parse file: %s, msg: %s\n", fileName, err)
		os.Exit(1)
	}
	fmt.Println(len(records))
	for _, row := range records {
		url1, _ := url.QueryUnescape(row[0])
		url2, _ := url.QueryUnescape(row[1])
		fmt.Printf("url: %s url2: %s\n", url1, url2)
	}
}
func parseQuery(m queryMap, query string) (err error) {
	for query != "" {
		key := query
		if i := strings.Index(key, "&"); i >= 0 {
			key, query = key[:i], key[i+1:]
		} else {
			query = ""
		}
		if key == "" {
			continue
		}
		value := ""
		if i := strings.Index(key, "="); i >= 0 {
			key, value = key[:i], key[i+1:]
		}
		key, err1 := url.QueryUnescape(key)
		if err1 != nil {
			if err == nil {
				err = err1
			}
			continue
		}
		value, err1 = url.QueryUnescape(value)
		if err1 != nil {
			if err == nil {
				err = err1
			}
			continue
		}
		m[key] = value
	}
	return err
}
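// For comparison, a minimal sketch (not part of the original code) using only the
// standard library: url.ParseQuery performs the same splitting and unescaping as
// the hand-rolled parseQuery above, but returns url.Values (a map[string][]string)
// rather than filling a caller-supplied map, and reports the first decoding error
// through a single returned err. Assumes the "fmt" and "net/url" imports.
func parseQueryStdlibSketch() {
	values, err := url.ParseQuery("a=1&b=hello%20world&b=2")
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(values.Get("a")) // "1"
	fmt.Println(values["b"])     // ["hello world" "2"]
}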
// restRemovePoolVirtualIP removes a virtual IP address from a resource pool
func restRemovePoolVirtualIP(w *rest.ResponseWriter, r *rest.Request, ctx *requestContext) {
	ip, err := url.QueryUnescape(r.PathParam("ip"))
	if err != nil {
		glog.Errorf("Could not get virtual ip (%v): %v", ip, err)
		restBadRequest(w, err)
		return
	}
	poolId, err := url.QueryUnescape(r.PathParam("poolId"))
	if err != nil {
		glog.Errorf("Could not get virtual ip poolId (%v): %v", poolId, err)
		restBadRequest(w, err)
		return
	}
	glog.V(0).Infof("Remove virtual ip=%v (in pool %v)", ip, poolId)
	client, err := ctx.getMasterClient()
	if err != nil {
		restServerError(w, err)
		return
	}
	request := pool.VirtualIP{PoolID: poolId, IP: ip}
	if err := client.RemoveVirtualIP(request); err != nil {
		glog.Errorf("Failed to remove virtual IP(%+v): %v", request, err)
		restServerError(w, err)
		return
	}
	restSuccess(w)
}
func findHandler(w http.ResponseWriter, r *http.Request) {
	r.ParseForm()
	expr := r.Form.Get("expr")
	source := r.Form.Get("source")
	var err error
	if expr, err = url.QueryUnescape(expr); err != nil {
		log.Println(err)
	}
	if source, err = url.QueryUnescape(source); err != nil {
		log.Println(err)
	}
	re, err := regexp.Compile(expr)
	if nil != err {
		log.Println(err)
	} else if len(expr) > 0 {
		markTmpl := fmt.Sprintf("%s%s%s$0%s/%s%s", openTag, markTag, closeTag, openTag, markTag, closeTag)
		source = re.ReplaceAllString(source, markTmpl)
	}
	source = html.EscapeString(source)
	source = strings.Replace(source, openTag, "<", -1)
	source = strings.Replace(source, closeTag, ">", -1)
	source = strings.Replace(source, "\n", "<br/>", -1)
	// Write the marked-up source directly; Fprint (rather than Fprintf) avoids
	// treating user input as a format string, so "%" no longer needs doubling.
	fmt.Fprint(w, source)
}
func google(paginas int) []string {
	fmt.Println("[+]Google search is in beta...")
	fmt.Println("[+]A CAPTCHA shows up from time to time...")
	regex_google = `"><a href="/url\?q=(.*?)&sa=U&`
	dork_escaped := url.QueryEscape(dork_comando)
	if paginas <= 1 {
		recebe_download := html_download("https://www.google.com.br/search?q=" + dork_escaped + "&oq=" + dork_escaped + "&gws_rd=cr,ssl&client=ubuntu&ie=UTF-8")
		resultado := parser(recebe_download, regex_google)
		for i := range resultado {
			url_unescaped, err := url.QueryUnescape(resultado[i][1])
			erro(err)
			resultado_slice_2 = append(resultado_slice_2, url_unescaped)
		}
	} else if paginas > 1 {
		for pa := 1; pa <= paginas; pa++ {
			pa_str := strconv.Itoa(pa)
			// Google pagination URL
			url_paginas := "https://www.google.com.br/search?q=" + dork_escaped + "&start=" + pa_str + "0"
			recebe_download := html_download(url_paginas)
			resultado := parser(recebe_download, regex_google)
			for i := range resultado {
				url_unescaped, err := url.QueryUnescape(resultado[i][1])
				erro(err)
				resultado_slice_2 = append(resultado_slice_2, url_unescaped)
			}
		}
	}
	return resultado_slice_2
}
func prevHandler(w http.ResponseWriter, r *http.Request, urlpath string) {
	ok, err := regexp.MatchString(
		"^"+protocol+"://"+*host+picpattern+".*$",
		r.Referer())
	httpErr(err)
	if !ok {
		http.NotFound(w, r)
		return
	}
	prefix := len(protocol + "://" + *host)
	picPath := r.Referer()[prefix:]
	if path.IsAbs(picPath) {
		picPath = picPath[1:]
	}
	words := strings.Split(picPath, string(filepath.Separator))
	file, err := url.QueryUnescape(path.Join(words[2:]...))
	httpErr(err)
	tag, err := url.QueryUnescape(words[1])
	httpErr(err)
	s := getPrev(file, tag)
	if s == "" {
		s = file
	}
	s = path.Join(picpattern, tag, s)
	http.Redirect(w, r, s, http.StatusFound)
}
// parseQuery parses the query string.
func parseQuery(s string) (query, error) {
	q := make(query, 0)
	for s != "" {
		key := s
		if i := strings.IndexAny(key, "&;"); i >= 0 {
			key, s = key[:i], key[i+1:]
		} else {
			s = ""
		}
		if key == "" {
			continue
		}
		value := ""
		if i := strings.Index(key, "="); i >= 0 {
			key, value = key[:i], key[i+1:]
		}
		key, err := url.QueryUnescape(key)
		if err != nil {
			return q, err
		}
		value, err = url.QueryUnescape(value)
		if err != nil {
			return q, err
		}
		q = append(q, map[string]string{
			key: value,
		})
	}
	return q, nil
}
// Parse query parameters and return them in the right order
func ParseQueryParameters(query string) (params []QueryParameter, err error) {
	for query != "" {
		key := query
		if i := strings.IndexAny(key, "&;"); i >= 0 {
			key, query = key[:i], key[i+1:]
		} else {
			query = ""
		}
		if key == "" {
			continue
		}
		value := ""
		if i := strings.Index(key, "="); i >= 0 {
			key, value = key[:i], key[i+1:]
		}
		key, err1 := url.QueryUnescape(key)
		if err1 != nil {
			if err == nil {
				err = err1
			}
			continue
		}
		value, err1 = url.QueryUnescape(value)
		if err1 != nil {
			if err == nil {
				err = err1
			}
			continue
		}
		params = append(params, QueryParameter{key, value})
	}
	return params, err
}
// SplitLDAPQuery splits the query in the URL into its constituent parts. All sections are optional.
// Query syntax is attribute?scope?filter?extensions
func SplitLDAPQuery(query string) (attributes, scope, filter, extensions string, err error) {
	parts := strings.Split(query, "?")
	switch len(parts) {
	case 4:
		extensions = parts[3]
		fallthrough
	case 3:
		if v, err := url.QueryUnescape(parts[2]); err != nil {
			return "", "", "", "", err
		} else {
			filter = v
		}
		fallthrough
	case 2:
		if v, err := url.QueryUnescape(parts[1]); err != nil {
			return "", "", "", "", err
		} else {
			scope = v
		}
		fallthrough
	case 1:
		if v, err := url.QueryUnescape(parts[0]); err != nil {
			return "", "", "", "", err
		} else {
			attributes = v
		}
		return attributes, scope, filter, extensions, nil
	case 0:
		return
	default:
		err = fmt.Errorf("too many query options %q", query)
		return "", "", "", "", err
	}
}
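// A minimal usage sketch (illustrative, not from the original source). It assumes
// the SplitLDAPQuery function above is in scope and feeds it the query portion of
// an LDAP URL such as ldap://host/ou=People,dc=example,dc=com?cn,mail?sub?(objectClass=person).
// Assumes the "fmt" import.
func splitLDAPQueryExample() {
	attrs, scope, filter, ext, err := SplitLDAPQuery("cn,mail?sub?(objectClass=person)")
	if err != nil {
		fmt.Println("bad query:", err)
		return
	}
	// attrs="cn,mail", scope="sub", filter="(objectClass=person)", ext=""
	fmt.Println(attrs, scope, filter, ext)
}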
// JoinTraceFromHeader decodes a Span with operation name `operationName` from
// `h`, expecting that header values are URL-escaped.
//
// If `operationName` is empty, the caller must later call
// `Span.SetOperationName` on the returned `Span`.
func JoinTraceFromHeader(
	operationName string,
	h http.Header,
	propagator SpanPropagator,
) (Span, error) {
	contextIDMap := make(map[string]string)
	tagsMap := make(map[string]string)
	for key, val := range h {
		if strings.HasPrefix(key, ContextIDHTTPHeaderPrefix) {
			// We don't know what to do with anything beyond slice item v[0]:
			unescaped, err := url.QueryUnescape(val[0])
			if err != nil {
				return nil, err
			}
			contextIDMap[strings.TrimPrefix(key, ContextIDHTTPHeaderPrefix)] = unescaped
		} else if strings.HasPrefix(key, TagsHTTPHeaderPrefix) {
			// We don't know what to do with anything beyond slice item v[0]:
			unescaped, err := url.QueryUnescape(val[0])
			if err != nil {
				return nil, err
			}
			tagsMap[strings.TrimPrefix(key, TagsHTTPHeaderPrefix)] = unescaped
		}
	}
	return propagator.JoinTraceFromText(operationName, contextIDMap, tagsMap)
}
func TestV1Metric(t *testing.T) {
	r := startV1API(getDefaultMockConfig(), "metric")
	Convey("Test Metric REST API V1", t, func() {
		Convey("Get metrics - v1/metrics", func() {
			resp, err := http.Get(
				fmt.Sprintf("http://localhost:%d/v1/metrics", r.port))
			So(err, ShouldBeNil)
			So(resp.StatusCode, ShouldEqual, 200)
			body, err := ioutil.ReadAll(resp.Body)
			So(err, ShouldBeNil)
			resp1, err := url.QueryUnescape(string(body))
			So(err, ShouldBeNil)
			So(
				fmt.Sprintf(fixtures.GET_METRICS_RESPONSE, r.port),
				ShouldResemble,
				resp1)
		})
		Convey("Get metrics from tree - v1/metrics/*namespace", func() {
			resp, err := http.Get(
				fmt.Sprintf("http://localhost:%d/v1/metrics/*namespace", r.port))
			So(err, ShouldBeNil)
			So(resp.StatusCode, ShouldEqual, 200)
			body, err := ioutil.ReadAll(resp.Body)
			So(err, ShouldBeNil)
			resp1, err := url.QueryUnescape(string(body))
			So(err, ShouldBeNil)
			So(
				fmt.Sprintf(fixtures.GET_METRICS_RESPONSE, r.port),
				ShouldResemble,
				resp1)
		})
	})
}
func SignUp(w http.ResponseWriter, r *http.Request) {
	var err error
	var tmpl *plate.Template

	params := r.URL.Query()
	errMsg := params.Get("error")
	errMsg, _ = url.QueryUnescape(errMsg)
	fname := params.Get("fname")
	fname, _ = url.QueryUnescape(fname)
	lname := params.Get("lname")
	lname, _ = url.QueryUnescape(lname)
	email := params.Get("email")
	email, _ = url.QueryUnescape(email)
	username := params.Get("username")
	username, _ = url.QueryUnescape(username)

	var submitted bool
	submitted, err = strconv.ParseBool(params.Get("submitted"))
	if err != nil {
		submitted = false
	}

	server := plate.NewServer()
	tmpl, err = server.Template(w)
	if err != nil {
		plate.Serve404(w, err.Error())
		return
	}

	tmpl.Bag["PageTitle"] = "Register"
	tmpl.Bag["Error"] = strings.ToTitle(errMsg)
	tmpl.Bag["Fname"] = strings.TrimSpace(fname)
	tmpl.Bag["Lname"] = strings.TrimSpace(lname)
	tmpl.Bag["Email"] = strings.TrimSpace(email)
	tmpl.Bag["Username"] = strings.TrimSpace(username)
	tmpl.Bag["CurrentYear"] = time.Now().Year()
	tmpl.Bag["Submitted"] = submitted
	tmpl.Bag["userID"] = 0

	tmpl.FuncMap["isNotNull"] = func(str string) bool {
		return strings.TrimSpace(str) != ""
	}
	tmpl.FuncMap["isLoggedIn"] = func() bool {
		return false
	}

	templates := append(TemplateFiles, "templates/auth/signup.html")
	tmpl.DisplayMultiple(templates)
}
func handler(f func(w http.ResponseWriter, r *http.Request, jobPath, body string)) http.HandlerFunc {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		debug("http:", r.Method, r.RequestURI)
		var err error
		jobPath := r.URL.Query().Get(":jobPath")
		if jobPath == "" {
			http.Error(w, "empty routing key", http.StatusBadRequest)
			return
		}
		jobPath, err = url.QueryUnescape(jobPath)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		jobBody := r.URL.Query().Get(":jobBody")
		if jobBody == "" {
			http.Error(w, "empty job", http.StatusBadRequest)
			return
		}
		jobBody, err = url.QueryUnescape(jobBody)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		f(w, r, jobPath, jobBody)
	})
}
func readLineKeyValue(br *bufio.Reader) (*KeyValue, error) {
	k, err := br.ReadString('\t')
	if err != nil {
		return nil, err
	}
	k = strings.TrimRight(k, "\t")
	keys := strings.SplitN(k, ",", 2)
	var reduceKey string
	var sortKey string
	reduceKey, err = url.QueryUnescape(keys[0])
	if err != nil {
		return nil, err
	}
	if len(keys) == 2 {
		sortKey, err = url.QueryUnescape(keys[1])
		if err != nil {
			return nil, err
		}
	}
	v, err := br.ReadString('\n')
	if err != nil {
		return nil, err
	}
	v = strings.TrimRight(v, "\n")
	return &KeyValue{reduceKey, sortKey, v}, nil
}
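// A sketch of the inverse operation (illustrative, not from the original source):
// writing a record in the line format readLineKeyValue expects, i.e.
// "reduceKey[,sortKey]<TAB>value<LF>". The keys are query-escaped so embedded tabs,
// commas and newlines survive the split; the value is written as-is and therefore
// must not contain a newline. Assumes the "fmt", "io" and "net/url" imports.
func writeLineKeyValue(w io.Writer, reduceKey, sortKey, value string) error {
	k := url.QueryEscape(reduceKey)
	if sortKey != "" {
		k += "," + url.QueryEscape(sortKey)
	}
	_, err := fmt.Fprintf(w, "%s\t%s\n", k, value)
	return err
}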
// ParseQuery provides an alternative to url.ParseQuery when the order of parameters must be retained. ParseQuery
// parses the URL-encoded query string and returns a URLQueryParameters object that can be used to get the ordered list
// of parameters, a map of the parameters, or parameters by name. ParseQuery always returns a non-nil
// URLQueryParameters object containing all the valid query parameters found; err describes the first decoding error
// encountered, if any.
func ParseQuery(query string) (u URLQueryParameters, err error) {
	// Replace ";" with "&" so we can split on a single character
	query = strings.Replace(query, ";", "&", -1)

	// Split it into parts (e.g., "foo=bar" is a part)
	parts := strings.Split(query, "&")

	// Iterate the parts and add them to the URLQueryParameters
	for _, part := range parts {
		if i := strings.Index(part, "="); i >= 0 {
			key, value := part[:i], part[i+1:]
			key, keyErr := url.QueryUnescape(key)
			if keyErr != nil {
				if err == nil {
					err = keyErr
				}
				continue
			}
			value, valueErr := url.QueryUnescape(value)
			if valueErr != nil {
				if err == nil {
					err = valueErr
				}
				continue
			}
			u.Add(key, value)
		}
	}
	return
}
func downloadServiceStateLogs(w *rest.ResponseWriter, r *rest.Request, client *node.ControlClient) {
	serviceStateID, err := url.QueryUnescape(r.PathParam("serviceStateId"))
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(fmt.Sprintf("Bad Request: %v", err)))
		return
	}
	serviceID, err := url.QueryUnescape(r.PathParam("serviceId"))
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(fmt.Sprintf("Bad Request: %v", err)))
		return
	}
	request := dao.ServiceStateRequest{serviceID, serviceStateID}
	var logs string
	err = client.GetServiceStateLogs(request, &logs)
	if err != nil {
		glog.Errorf("Unexpected error getting service state logs: %v", err)
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(fmt.Sprintf("Internal Server Error: %v", err)))
		return
	}
	var filename = serviceID + time.Now().Format("2006-01-02-15-04-05") + ".log"
	w.Header().Set("Content-Disposition", "attachment; filename="+filename)
	w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
	w.Write([]byte(logs))
}
// TraceContextFromHeader decodes a TraceContext from `h`, expecting that
// header values are URL-escaped.
func TraceContextFromHeader(
	h http.Header,
	decoder TraceContextDecoder,
) (TraceContext, error) {
	contextIDMap := make(map[string]string)
	tagsMap := make(map[string]string)
	for key, val := range h {
		if strings.HasPrefix(key, ContextIDHTTPHeaderPrefix) {
			// We don't know what to do with anything beyond slice item v[0]:
			unescaped, err := url.QueryUnescape(val[0])
			if err != nil {
				return nil, err
			}
			contextIDMap[strings.TrimPrefix(key, ContextIDHTTPHeaderPrefix)] = unescaped
		} else if strings.HasPrefix(key, TagsHTTPHeaderPrefix) {
			// We don't know what to do with anything beyond slice item v[0]:
			unescaped, err := url.QueryUnescape(val[0])
			if err != nil {
				return nil, err
			}
			tagsMap[strings.TrimPrefix(key, TagsHTTPHeaderPrefix)] = unescaped
		}
	}
	return decoder.TraceContextFromText(contextIDMap, tagsMap)
}
// EditBookmarkHandler writes out the response to editing a bookmark
func EditBookmarkHandler(req *http.Request, w http.ResponseWriter, cs *sessions.CookieStore, connection *Connection, params martini.Params) {
	// We use a map instead of Bookmark because id would be ""
	bookmark := make(map[string]interface{})
	bookmark["Title"], _ = url.QueryUnescape(req.PostFormValue("title"))
	bookmark["Url"], _ = url.QueryUnescape(req.PostFormValue("url"))
	if !IsValidURL(bookmark["Url"].(string)) || len(bookmark["Title"].(string)) < 1 {
		WriteJSONResponse(200, true, "The url is not valid or the title is empty.", req, w)
	} else {
		_, userID := GetUserData(cs, req)
		tags, _ := url.QueryUnescape(req.PostFormValue("tags"))
		if tags != "" {
			bookmark["Tags"] = strings.Split(tags, ",")
			for i, v := range bookmark["Tags"].([]string) {
				bookmark["Tags"].([]string)[i] = strings.ToLower(strings.TrimSpace(v))
			}
		}
		response, err := connection.EditBookmark(userID, params, bookmark)
		if err != nil {
			WriteJSONResponse(200, true, "Error editing bookmark.", req, w)
		} else {
			if response.Updated > 0 || response.Unchanged > 0 || response.Replaced > 0 {
				WriteJSONResponse(200, false, "Bookmark updated successfully.", req, w)
			} else {
				WriteJSONResponse(200, true, "Error updating bookmark.", req, w)
			}
		}
	}
}
// NewBookmarkHandler writes out the new bookmark JSON response
func NewBookmarkHandler(req *http.Request, w http.ResponseWriter, cs *sessions.CookieStore, connection *Connection) {
	// We use a map instead of Bookmark because id would be ""
	bookmark := make(map[string]interface{})
	bookmark["Title"], _ = url.QueryUnescape(req.PostFormValue("title"))
	bookmark["Url"], _ = url.QueryUnescape(req.PostFormValue("url"))
	if !IsValidURL(bookmark["Url"].(string)) || len(bookmark["Title"].(string)) < 1 {
		WriteJSONResponse(200, true, "The url is not valid or the title is empty.", req, w)
	} else {
		_, userID := GetUserData(cs, req)
		tags, _ := url.QueryUnescape(req.PostFormValue("tags"))
		if tags != "" {
			bookmark["Tags"] = strings.Split(tags, ",")
			for i, v := range bookmark["Tags"].([]string) {
				bookmark["Tags"].([]string)[i] = strings.ToLower(strings.TrimSpace(v))
			}
		}
		bookmark["Created"] = float64(time.Now().Unix())
		bookmark["Date"] = time.Unix(int64(bookmark["Created"].(float64)), 0).Format("Jan 2, 2006 at 3:04pm")
		bookmark["User"] = userID
		response, _ := connection.NewBookmark(userID, bookmark)
		if response.Inserted > 0 {
			WriteJSONResponse(200, false, response.GeneratedKeys[0], req, w)
		} else {
			WriteJSONResponse(200, true, "Error inserting bookmark.", req, w)
		}
	}
}
// XssDoubq double-unescapes the "in" CGI parameter
func XssDoubq(w http.ResponseWriter, r *http.Request) *LabResp {
	input := &InData{}
	rawParams := make(map[string][]string)
	ParseRawQuery(rawParams, r.URL.RawQuery)
	inputRaw, ok := rawParams["in"]
	if ok && len(inputRaw) > 0 {
		// Imitate a bad way to do input validation: filter after only a single level of unescaping.
		input.InRaw = inputRaw[0]
		// first-level unescape
		unesc1, err := url.QueryUnescape(inputRaw[0])
		if err != nil {
			fmt.Printf("ERROR in the first url.QueryUnescape on %s\n", inputRaw[0])
			return &LabResp{Err: nil, Code: http.StatusBadRequest}
		}
		unesc1 = Transform(unesc1, TagsOff, QuotesOff)
		// second-level unescape, applied after the filtering step
		unesc, err := url.QueryUnescape(unesc1)
		if err != nil {
			fmt.Printf("ERROR in the second url.QueryUnescape on %s\n", unesc1)
			return &LabResp{Err: nil, Code: http.StatusBadRequest}
		}
		input.In = unesc
	}
	err := DoTemplate(w, r.URL.Path, input)
	if err != nil {
		log.Printf("Error in DoTemplate: %s\n", err)
		return &LabResp{Err: nil, Code: http.StatusInternalServerError}
	}
	return &LabResp{Err: nil, Code: http.StatusOK}
}
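// A minimal sketch (illustrative, not part of the lab code) of the double encoding
// XssDoubq above is meant to exercise: escaping a payload twice turns "<" into
// "%253C"; after one unescape it is still "%3C" (no literal "<" for a single-decode
// filter to flag) and only becomes "<" again after the second unescape. Assumes the
// "fmt" and "net/url" imports.
func doubleEscapeSketch() {
	payload := `<script>alert(1)</script>`
	once := url.QueryEscape(payload) // "%3Cscript%3Ealert%281%29%3C%2Fscript%3E"
	twice := url.QueryEscape(once)   // "%253Cscript%253Ealert%25281%2529%253C%252Fscript%253E"
	fmt.Println(once)
	fmt.Println(twice)
}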
// adapted from golang std lib: https://golang.org/src/net/url/url.go#L546
func parseQuery(m url.Values, query string) (err error) {
	for query != "" {
		key := query
		if i := strings.IndexAny(key, "&;"); i >= 0 {
			key, query = key[:i], key[i+1:]
		} else {
			query = ""
		}
		if key == "" {
			continue
		}
		value := noQueryValue
		if i := strings.Index(key, "="); i >= 0 {
			key, value = key[:i], key[i+1:]
		}
		key, err1 := url.QueryUnescape(key)
		if err1 != nil {
			if err == nil {
				err = err1
			}
			continue
		}
		if value != noQueryValue {
			value, err1 = url.QueryUnescape(value)
			if err1 != nil {
				if err == nil {
					err = err1
				}
				continue
			}
		}
		m[key] = append(m[key], value)
	}
	return err
}
// parseDSNParams parses the DSN "query string".
// Values must be url.QueryEscape'ed.
func parseDSNParams(cfg *Configuration, params string) (err error) {
	for _, v := range strings.Split(params, "&") {
		param := strings.SplitN(v, "=", 2)
		if len(param) != 2 {
			continue
		}

		// cfg params
		switch value := param[1]; param[0] {
		// Time Location
		case "loc":
			if value, err = url.QueryUnescape(value); err != nil {
				return
			}
			cfg.Location, err = time.LoadLocation(value)
			if err != nil {
				return
			}

		// Dial Timeout
		case "networkTimeout":
			cfg.NetworkTimeout, err = time.ParseDuration(value)
			if err != nil {
				return
			}

		// TLS-Encryption
		case "tls":
			boolValue, isBool := readBool(value)
			if isBool {
				if boolValue {
					cfg.TLS = &tls.Config{}
				}
			} else {
				if strings.ToLower(value) == "skip-verify" {
					cfg.TLS = &tls.Config{InsecureSkipVerify: true}
				} else {
					return fmt.Errorf("Invalid value / unknown config name: %s", value)
				}
			}

		default:
			// lazy init
			if cfg.Params == nil {
				cfg.Params = make(map[string]string)
			}
			if cfg.Params[param[0]], err = url.QueryUnescape(value); err != nil {
				return
			}
		}
	}
	return
}
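// A sketch of the writing side (illustrative; only parameter names handled above
// are used): building a DSN parameter string with url.Values so values such as the
// time zone name are query-escaped, which is what parseDSNParams expects before it
// calls url.QueryUnescape. Assumes the "net/url" import.
func buildDSNParamsSketch() string {
	params := url.Values{}
	params.Set("loc", "America/New_York")
	params.Set("networkTimeout", "30s")
	params.Set("tls", "skip-verify")
	// Encode sorts the keys and escapes the values:
	// "loc=America%2FNew_York&networkTimeout=30s&tls=skip-verify"
	return params.Encode()
}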
func (b *B2) downloadFile(resp *http.Response) (*File, io.ReadCloser, error) {
	switch resp.StatusCode {
	case 200:
	default:
		if err := b.parseError(resp); err != nil {
			resp.Body.Close()
			return nil, nil, err
		}
		resp.Body.Close()
		return nil, nil, fmt.Errorf("Unrecognised status code: %d", resp.StatusCode)
	}

	name, err := url.QueryUnescape(resp.Header.Get("X-Bz-File-Name"))
	if err != nil {
		resp.Body.Close()
		return nil, nil, err
	}
	file := &File{
		Id:          resp.Header.Get("X-Bz-File-Id"),
		Name:        name,
		ContentSha1: resp.Header.Get("X-Bz-Content-Sha1"),
		ContentType: resp.Header.Get("Content-Type"),
		FileInfo:    make(map[string]string),
	}

	size, err := strconv.ParseInt(resp.Header.Get("Content-Length"), 10, 64)
	if err != nil {
		resp.Body.Close()
		return nil, nil, err
	}
	file.ContentLength = size

	for k, v := range resp.Header {
		if strings.HasPrefix(k, "X-Bz-Info-") {
			key, err := url.QueryUnescape(k[len("X-Bz-Info-"):])
			if err != nil {
				key = k[len("X-Bz-Info-"):]
				log.Printf("Unable to decode key: %q", key)
			}
			value, err := url.QueryUnescape(v[0])
			if err != nil {
				value = v[0]
				log.Printf("Unable to decode value: %q", value)
			}
			file.FileInfo[key] = value
		}
	}

	return file, resp.Body, nil
}
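// A sketch of the matching upload side (illustrative; it mirrors what downloadFile
// above decodes rather than documenting the full B2 API): custom file info is sent
// as "X-Bz-Info-<key>" headers with query-escaped keys and values, which is why the
// download path runs url.QueryUnescape on both. Assumes the "net/http" and "net/url" imports.
func setFileInfoHeaders(h http.Header, fileInfo map[string]string) {
	for k, v := range fileInfo {
		h.Set("X-Bz-Info-"+url.QueryEscape(k), url.QueryEscape(v))
	}
}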
func EditPost(w http.ResponseWriter, r *http.Request) {
	tmpl := plate.NewTemplate(w)
	id, _ := strconv.Atoi(r.URL.Query().Get(":id"))
	errMsg, _ := url.QueryUnescape(r.URL.Query().Get("error"))
	message, _ := url.QueryUnescape(r.URL.Query().Get("message"))
	log.Println(message)
	post, _ := models.Post{ID: id}.Get()
	session, _ := store.Get(r, "adminstuffs")
	if pjson := session.Flashes("post"); len(pjson) > 0 {
		json.Unmarshal([]byte(pjson[0].(string)), &post)
		session.Save(r, w)
	}
	if strings.TrimSpace(errMsg) != "" {
		tmpl.Bag["error"] = errMsg
	}
	if strings.TrimSpace(message) != "" {
		tmpl.Bag["message"] = message
	}
	u := models.User{}
	users, _ := u.GetAll()
	tmpl.FuncMap["isUser"] = func(uid int) bool {
		return uid == post.UserID
	}
	tmpl.FuncMap["formatDate"] = func(dt time.Time) string {
		tlayout := "01/02/2006 3:04 PM"
		Local, _ := time.LoadLocation("US/Central")
		return dt.In(Local).Format(tlayout)
	}
	tmpl.FuncMap["hasCategory"] = func(cid int) bool {
		for _, cat := range post.Categories {
			if cid == cat.ID {
				return true
			}
		}
		return false
	}
	tmpl.Bag["PageTitle"] = "Edit Blog Post"
	tmpl.Bag["type"] = "Edit"
	tmpl.Bag["categories"], _ = models.BlogCategory{}.GetAll()
	tmpl.Bag["users"] = users
	tmpl.Bag["post"] = post
	tmpl.ParseFile("templates/blog/navigation.html", false)
	tmpl.ParseFile("templates/blog/postform.html", false)
	err := tmpl.Display(w)
	if err != nil {
		log.Println(err)
	}
}
func EditContent(w http.ResponseWriter, r *http.Request) {
	tmpl := plate.NewTemplate(w)
	params := r.URL.Query()
	id, err := strconv.Atoi(params.Get(":id"))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	revid, err := strconv.Atoi(params.Get(":revid"))
	if err != nil {
		revid = 0
	}
	errMsg, _ := url.QueryUnescape(params.Get("error"))
	if len(strings.TrimSpace(errMsg)) > 0 {
		tmpl.Bag["error"] = errMsg
	}
	message, _ := url.QueryUnescape(params.Get("message"))
	if len(strings.TrimSpace(message)) > 0 {
		tmpl.Bag["message"] = message
	}
	tmpl.FuncMap["isNotZero"] = func(num int) bool {
		return num != 0
	}
	content := models.Content{ID: id}
	content, err = content.Get()
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	tmpl.FuncMap["formatDate"] = func(dt time.Time) string {
		tlayout := "Mon, 01/02/06, 3:04PM MST"
		Local, _ := time.LoadLocation("US/Central")
		return dt.In(Local).Format(tlayout)
	}
	revision := content.ActiveRevision
	if revid != 0 {
		revision = content.Revisions.GetRevision(revid)
	}
	tmpl.Bag["PageTitle"] = "Edit Content"
	tmpl.Bag["content"] = content
	tmpl.Bag["revision"] = revision
	tmpl.ParseFile("templates/website/navigation.html", false)
	tmpl.ParseFile("templates/website/editcontent.html", false)
	err = tmpl.Display(w)
	if err != nil {
		log.Println(err)
	}
}
func (p *HTTPPool) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	// Parse request.
	if !strings.HasPrefix(r.URL.Path, p.basePath) {
		panic("HTTPPool serving unexpected path: " + r.URL.Path)
	}
	parts := strings.SplitN(r.URL.Path[len(p.basePath):], "/", 2)
	if len(parts) != 2 {
		http.Error(w, "bad request", http.StatusBadRequest)
		return
	}
	groupName, err := url.QueryUnescape(parts[0])
	if err != nil {
		http.Error(w, "decoding group: "+err.Error(), http.StatusBadRequest)
		return
	}
	key, err := url.QueryUnescape(parts[1])
	if err != nil {
		http.Error(w, "decoding key: "+err.Error(), http.StatusBadRequest)
		return
	}

	// Fetch the value for this group/key.
	group := GetGroup(groupName)
	if group == nil {
		http.Error(w, "no such group: "+groupName, http.StatusNotFound)
		return
	}
	var ctx Context
	if p.Context != nil {
		ctx = p.Context(r)
	}

	group.Stats.ServerRequests.Add(1)
	var value []byte
	err = group.Get(ctx, key, AllocatingByteSliceSink(&value))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Write the value to the response body as a proto message.
	body, err := proto.Marshal(&pb.GetResponse{Value: value})
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/x-protobuf")
	w.Write(body)
}
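// A minimal sketch of the client side (illustrative, not the library's own code):
// the request path ServeHTTP above expects is basePath + escaped group + "/" +
// escaped key. Because url.QueryEscape turns "/" into "%2F", a key that contains
// slashes still splits cleanly into exactly two parts. Assumes the "net/url" import.
func peerURL(baseURL, basePath, group, key string) string {
	// e.g. peerURL("http://10.0.0.2:8080", "/_groupcache/", "scores", "user/42")
	return baseURL + basePath + url.QueryEscape(group) + "/" + url.QueryEscape(key)
}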
func parseStringArrMap(stringArrMap map[string][]string) []NameValuePair {
	index := 0
	harQueryString := make([]NameValuePair, len(stringArrMap))
	for k, v := range stringArrMap {
		// QueryUnescape decodes the key and the joined values, so these hold the unescaped forms.
		unescapedKey, _ := url.QueryUnescape(k)
		unescapedValues, _ := url.QueryUnescape(strings.Join(v, ","))
		harNameValuePair := NameValuePair{
			Name:  unescapedKey,
			Value: unescapedValues,
		}
		harQueryString[index] = harNameValuePair
		index++
	}
	return harQueryString
}
func (s *ClientTests) TestPutGetAndDeleteUserPolicy(c *check.C) {
	userResp, err := s.iam.CreateUser("gopher", "/gopher/")
	c.Assert(err, check.IsNil)
	defer s.iam.DeleteUser(userResp.User.Name)

	document := `{
		"Statement": [
		{
			"Action": [
				"s3:*"
			],
			"Effect": "Allow",
			"Resource": [
				"arn:aws:s3:::8shsns19s90ajahadsj/*",
				"arn:aws:s3:::8shsns19s90ajahadsj"
			]
		}]
	}`

	_, err = s.iam.PutUserPolicy(userResp.User.Name, "EverythingS3", document)
	c.Assert(err, check.IsNil)

	resp, err := s.iam.GetUserPolicy(userResp.User.Name, "EverythingS3")
	c.Assert(err, check.IsNil)
	c.Assert(resp.Policy.Name, check.Equals, "EverythingS3")
	c.Assert(resp.Policy.UserName, check.Equals, userResp.User.Name)
	gotDocument, err := url.QueryUnescape(resp.Policy.Document)
	c.Assert(err, check.IsNil)
	c.Assert(gotDocument, check.Equals, document)

	_, err = s.iam.DeleteUserPolicy(userResp.User.Name, "EverythingS3")
	c.Assert(err, check.IsNil)

	_, err = s.iam.GetUserPolicy(userResp.User.Name, "EverythingS3")
	c.Assert(err, check.NotNil)
}
func imageFromUrl(w http.ResponseWriter, r *http.Request) (*imagick.MagickWand, error) {
	queryUrl := r.URL.Query().Get("url")
	imageURL, _ := url.QueryUnescape(queryUrl)
	resp, err := http.Get(imageURL)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		http.Error(w, "Error retrieving url", 500)
		return nil, err
	}
	wand := imagick.NewMagickWand()
	err = wand.ReadImageBlob(data)
	if err != nil {
		http.Error(w, "Error retrieving url", 500)
		return nil, err
	}
	if err = wand.SetImageFormat("JPG"); err != nil {
		http.Error(w, "Error retrieving url", 500)
		return nil, err
	}
	wand.AutoLevelImage()
	return wand, nil
}
// getSid retrieves the session identifier from the HTTP request.
// It first tries to read the id from the session cookie (the cookie name is configurable);
// if that is not present, it falls back to the query parameters and, optionally, the request headers.
//
// error is not nil when anything goes wrong.
// sid is empty when a new session id needs to be generated;
// otherwise a valid session id is returned.
func (manager *Manager) getSid(r *http.Request) (string, error) {
	cookie, errs := r.Cookie(manager.config.CookieName)
	if errs != nil || cookie.Value == "" {
		var sid string
		if manager.config.EnableSidInUrlQuery {
			errs := r.ParseForm()
			if errs != nil {
				return "", errs
			}
			sid = r.FormValue(manager.config.CookieName)
		}
		// if not found in cookie / query parameter, read it from the request headers
		if manager.config.EnableSidInHttpHeader && sid == "" {
			sids, isFound := r.Header[manager.config.SessionNameInHttpHeader]
			if isFound && len(sids) != 0 {
				return sids[0], nil
			}
		}
		return sid, nil
	}

	// The HTTP request contains a cookie carrying the session id.
	return url.QueryUnescape(cookie.Value)
}
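// A sketch of the matching write path (illustrative; the cookie settings are
// placeholders): the session id is stored with url.QueryEscape when the cookie is
// set, which is why getSid above runs url.QueryUnescape on cookie.Value before
// returning it. Assumes the "net/http" and "net/url" imports.
func setSidCookie(w http.ResponseWriter, cookieName, sid string) {
	http.SetCookie(w, &http.Cookie{
		Name:     cookieName,
		Value:    url.QueryEscape(sid),
		Path:     "/",
		HttpOnly: true,
	})
}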