func parseForm(m map[string][]string, query string) (err os.Error) { data := make(map[string]*vector.StringVector) for _, kv := range strings.Split(query, "&") { kvPair := strings.SplitN(kv, "=", 2) var key, value string var e os.Error key, e = url.QueryUnescape(kvPair[0]) if e == nil && len(kvPair) > 1 { value, e = url.QueryUnescape(kvPair[1]) } if e != nil { err = e } vec, ok := data[key] if !ok { vec = new(vector.StringVector) data[key] = vec } vec.Push(value) } for k, vec := range data { m[k] = vec.Copy() } return }
// urlparser receives up.size page URLs on c, fetches each page, extracts
// every ed2k:// link it contains, and finally sends the collected links on
// tf. Links are keyed by the 5th "|"-separated field of the ed2k URL
// (presumably the file hash — TODO confirm), which also deduplicates
// repeated links across pages.
func (up *URLParse) urlparser(c chan string, tf chan map[string]string) {
	// matches anchors that embed an ed2k link
	ed2k, _ := regexp.Compile("href=\"ed2k://")
	// strips remaining HTML tags plus tab/CR characters
	re, _ := regexp.Compile("<([^>]|\n)*>|\t|\r")
	parsedlink := make(map[string]string)
	for i := 0; i < up.size; i++ {
		if pas, err := ContGet(<-c); err == nil {
			// put each ed2k URL at the start of its own line, then
			// replace all markup with newlines
			pas = ed2k.ReplaceAllString(pas, ">\ned2k://")
			pas = re.ReplaceAllString(pas, "\n")
			pasarray := strings.Split(pas, "\n")
			// start at 1: index 0 can never begin with a link we inserted
			for is := 1; is < len(pasarray); is++ {
				if strings.HasPrefix(pasarray[is], "ed2k://") {
					// the link may still carry a trailing quoted suffix
					stringindex := strings.Index(pasarray[is], "\"")
					var edurl string
					if stringindex < 1 {
						edurl = pasarray[is]
						edurl, _ = url.QueryUnescape(edurl)
					} else {
						edurl = pasarray[is][0:stringindex]
						edurl, _ = url.QueryUnescape(edurl)
					}
					spedurl := strings.Split(edurl, "|")
					// keep only well-formed links: at least 6 fields and a
					// plausibly long 5th field
					if len(spedurl) > 5 && len(spedurl[4]) > 20 {
						key := spedurl[4]
						parsedlink[key] = edurl
					}
				}
			}
		} else {
			fmt.Printf("can't open url; err=%s\n", err.String())
		}
	}
	tf <- parsedlink
	return
}
// find the handler and path parameters given the path component of the request // URL and the request method. func (router *Router) find(path string, method string) (Handler, []string, []string) { for _, r := range router.routes { values := r.regexp.FindStringSubmatch(path) if len(values) == 0 { continue } if r.addSlash && path[len(path)-1] != '/' { return HandlerFunc(addSlash), nil, nil } values = values[1:] for j := 0; j < len(values); j++ { if value, e := url.QueryUnescape(values[j]); e != nil { return routerError(StatusNotFound), nil, nil } else { values[j] = value } } if handler := r.handlers[method]; handler != nil { return handler, r.names, values } if method == "HEAD" { if handler := r.handlers["GET"]; handler != nil { return handler, r.names, values } } if handler := r.handlers["*"]; handler != nil { return handler, r.names, values } return routerError(StatusMethodNotAllowed), nil, nil } return routerError(StatusNotFound), nil, nil }
// main ranks programming languages by popularity: it fetches the language
// list, queries each language's popularity concurrently, and prints the
// languages in rank order as soon as each consecutive rank has arrived.
func main() {
	rankch := make(chan langpop)
	nlangs := 0
	for _, lang := range languages() {
		// one goroutine per language; lang is passed as an argument so
		// each goroutine captures its own copy
		go func(lang string) {
			rankch <- langpop{lang, popularity(lang)}
		}(lang)
		nlangs++
	}
	ranks := make(map[int]string)
	for r := 1; ; r++ {
		// drain results until rank r has been received; once every
		// goroutine has reported and r is still missing, we are done
		for {
			if _, ok := ranks[r]; ok {
				break
			} else if nlangs == 0 {
				return
			}
			lp := <-rankch
			//~ println(lp.Rank, lp.Lang)
			ranks[lp.Rank] = lp.Lang
			nlangs--
		}
		// language names arrive URL-escaped; decode before printing
		plang, err := url.QueryUnescape(ranks[r])
		if err != nil {
			panic(err)
		}
		println(r, plang)
	}
}
func draftFilename(req *http.Request) string { rawPath := strings.SplitN(req.URL.String(), "/", 3) rawFilename, err := url.QueryUnescape(rawPath[2]) if err != nil { log.Fatal(err) } return filepath.Join(DRAFTS_DIR, rawFilename) }
// credentials returns OAuth credentials stored in cookie with name key. func credentials(req *web.Request, key string) (*oauth.Credentials, os.Error) { s := req.Cookie.Get(key) if s == "" { return nil, os.NewError("main: missing cookie") } a := strings.Split(s, "/") if len(a) != 2 { return nil, os.NewError("main: bad credential cookie") } token, err := url.QueryUnescape(a[0]) if err != nil { return nil, os.NewError("main: bad credential cookie") } secret, err := url.QueryUnescape(a[1]) if err != nil { return nil, os.NewError("main: bad credential cookie") } return &oauth.Credentials{token, secret}, nil }
// acccessToken returns OAuth2 access token stored in a cookie. func accessToken(req *web.Request) (string, os.Error) { s := req.Cookie.Get("fbtok") if s == "" { return "", os.NewError("main: missing cookie") } token, err := url.QueryUnescape(s) if err != nil { return "", os.NewError("main: bad credential cookie") } return token, nil }
func (p *ContactGroup) GroupUserId() string { //http://www.google.com/m8/feeds/groups/some_email_address/base/some_id_number arr := strings.Split(p.Id.Value, "/") if len(arr) > 3 { s, err := url.QueryUnescape(arr[len(arr)-3]) if err != nil { return arr[len(arr)-3] } return s } return "" }
func parseForm(m map[string][]string, query string) (err os.Error) { for _, kv := range strings.Split(query, "&") { kvPair := strings.SplitN(kv, "=", 2) var key, value string var e os.Error key, e = url.QueryUnescape(kvPair[0]) if e == nil && len(kvPair) > 1 { value, e = url.QueryUnescape(kvPair[1]) } if e != nil { err = e } vec, ok := m[key] if !ok { vec = []string{} } m[key] = append(vec, value) } return }
// TODO: this is SILLY.
// addQueryParams appends params to resource as a query string. Each value
// is first unescaped (if it already was escaped) and then re-escaped, so
// both raw and pre-escaped inputs come out escaped exactly once.
// Note: map iteration order makes the parameter order nondeterministic.
func addQueryParams(resource string, params map[string]string) string {
	out := resource
	sep := "?"
	for name, val := range params {
		// normalize: a value that unescapes cleanly is replaced by its
		// unescaped form before re-escaping
		if unescaped, err := url.QueryUnescape(val); err == nil {
			val = unescaped
		}
		out += sep + name + "=" + url.QueryEscape(val)
		sep = "&"
	}
	return out
}
func languages() (langs []string) { resp, err := http.Get("http://github.com/languages") if err != nil { panic(err) } body, err := ioutil.ReadAll(resp.Body) if err != nil { panic(err) } seen := make(map[string]bool) for _, subm := range langre.FindAllStringSubmatch(string(body), -1) { unlang, err := url.QueryUnescape(subm[1]) if err != nil { panic(err) } if !seen[unlang] { seen[unlang] = true langs = append(langs, subm[1]) } } return }
// unescapeParams URL-unescapes every value in p, in place. A value that is
// not valid URL-encoding is left unchanged — previously the ignored error
// meant such values were silently replaced with "" (QueryUnescape returns
// the empty string on failure).
func unescapeParams(p map[string]string) {
	for k, v := range p {
		if uv, err := url.QueryUnescape(v); err == nil {
			p[k] = uv
		}
	}
}
// readCookies parses all "Cookie" values from
// the header h, removes the successfully parsed values from the
// "Cookie" key in h and returns the parsed Cookies.
func readCookies(h http.Header) []*http.Cookie {
	cookies := []*http.Cookie{}
	lines, ok := h["Cookie"]
	if !ok {
		return cookies
	}
	// header lines that produced no cookies are retained under "Cookie"
	unparsedLines := []string{}
	for _, line := range lines {
		parts := strings.Split(strings.TrimSpace(line), ";")
		// skip lines that are empty after trimming
		if len(parts) == 1 && parts[0] == "" {
			continue
		}
		// Per-line attributes
		var lineCookies = make(map[string]string)
		var path string
		var domain string
		var httponly bool
		for i := 0; i < len(parts); i++ {
			parts[i] = strings.TrimSpace(parts[i])
			if len(parts[i]) == 0 {
				continue
			}
			// split "name=value"; a part without "=" keeps val == ""
			attr, val := parts[i], ""
			var err os.Error
			if j := strings.Index(attr, "="); j >= 0 {
				attr, val = attr[:j], attr[j+1:]
				val, err = url.QueryUnescape(val)
				// a value that fails to unescape drops this part only
				if err != nil {
					continue
				}
			}
			// "$"-prefixed attributes apply to every cookie on this line;
			// anything else is a cookie name/value pair
			switch strings.ToLower(attr) {
			case "$httponly":
				httponly = true
			case "$domain":
				domain = val
				// TODO: Add domain parsing
			case "$path":
				path = val
				// TODO: Add path parsing
			default:
				lineCookies[attr] = val
			}
		}
		if len(lineCookies) == 0 {
			unparsedLines = append(unparsedLines, line)
		}
		for n, v := range lineCookies {
			cookies = append(cookies, &http.Cookie{
				Name:     n,
				Value:    v,
				Path:     path,
				Domain:   domain,
				HttpOnly: httponly,
				MaxAge:   -1,
				Raw:      line,
			})
		}
	}
	// pre-Go 1 idiom: assigning with a false second operand deletes the
	// "Cookie" key; with true it stores the unparsed remainder
	h["Cookie"] = unparsedLines, len(unparsedLines) > 0
	return cookies
}