func (s *charmsSuite) TestGetRejectsWrongEnvUUIDPath(c *gc.C) {
	url := s.charmsURL(c, "url=local:quantal/dummy-1&file=revision")
	url.Path = "/environment/dead-beef-123456/charms"
	resp, err := s.authRequest(c, "GET", url.String(), "", nil)
	c.Assert(err, jc.ErrorIsNil)
	s.assertErrorResponse(c, resp, http.StatusNotFound, `unknown environment: "dead-beef-123456"`)
}
func (p *Prerender) buildURL(or *http.Request) string {
	url := p.Options.PrerenderURL
	if !strings.HasSuffix(url.String(), "/") {
		url.Path = url.Path + "/"
	}

	var protocol = or.URL.Scheme
	if cf := or.Header.Get("CF-Visitor"); cf != "" {
		match := cfSchemeRegex.FindStringSubmatch(cf)
		if len(match) > 1 {
			protocol = match[1]
		}
	}
	if len(protocol) == 0 {
		protocol = "http"
	}
	if fp := or.Header.Get("X-Forwarded-Proto"); fp != "" {
		protocol = strings.Split(fp, ",")[0]
	}

	apiURL := url.String() + protocol + "://" + or.Host + or.URL.Path + "?" + or.URL.RawQuery
	return apiURL
}
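// A standalone sketch of buildURL's protocol detection above, simplified:
// CF-Visitor is consulted first, but X-Forwarded-Proto is evaluated last and
// therefore wins when both are present (taking the first entry of its
// comma-separated list). The header values are hypothetical, and the string
// check stands in for cfSchemeRegex.
func demoProtocolPrecedence() string {
	h := http.Header{}
	h.Set("CF-Visitor", `{"scheme":"https"}`)
	h.Set("X-Forwarded-Proto", "http,https")

	protocol := "http"
	if cf := h.Get("CF-Visitor"); cf != "" {
		if strings.Contains(cf, `"scheme":"https"`) { // stand-in for cfSchemeRegex
			protocol = "https"
		}
	}
	if fp := h.Get("X-Forwarded-Proto"); fp != "" {
		protocol = strings.Split(fp, ",")[0]
	}
	return protocol // "http": X-Forwarded-Proto overrode CF-Visitor
}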
func Test_parseSearchResponse(t *testing.T) {
	responseBody := "HTTP/1.1 200 OK\r\n" +
		"CACHE-CONTROL: max-age=100\r\n" +
		"EXT:\r\n" +
		"LOCATION: http://10.1.2.3:80/description.xml\r\n" +
		"SERVER: FreeRTOS/6.0.5, UPnP/1.0, IpBridge/0.1\r\n" +
		"ST: upnp:rootdevice\r\n" +
		"USN: uuid:2f402f80-da50-11e1-9b23-0017880a4c69::upnp:rootdevice\r\n" +
		"Date: Sun, 18 Aug 2013 08:49:37 GMT\r\n" +
		"\r\n"
	responseAddr, _ := net.ResolveUDPAddr("udp", "10.1.2.3:1900")
	response, err := parseSearchResponse(strings.NewReader(responseBody), responseAddr)
	if err != nil {
		t.Fatal("Error while parsing the response.", err)
	}
	assertEqual(t, "max-age=100", response.Control, "response.Control")
	assertEqual(t, "FreeRTOS/6.0.5, UPnP/1.0, IpBridge/0.1", response.Server, "response.Server")
	assertEqual(t, "upnp:rootdevice", response.ST, "response.ST")
	assertEqual(t, "", response.Ext, "response.Ext")
	assertEqual(t, "uuid:2f402f80-da50-11e1-9b23-0017880a4c69::upnp:rootdevice", response.USN, "response.USN")
	assertEqual(t, responseAddr, response.ResponseAddr, "response.Addr")

	url, _ := url.Parse("http://10.1.2.3:80/description.xml")
	if url.String() != response.Location.String() {
		t.Errorf("%q is not equal to %q. %q", url.String(), response.Location.String(), "response.Location")
	}

	gmt, _ := time.LoadLocation("UTC")
	date := time.Date(2013, time.August, 18, 8, 49, 37, 0, gmt)
	assertEqual(t, date, response.Date, "response.Date")
}
func GetHandler(w http.ResponseWriter, req *http.Request, s Server) error {
	vars := mux.Vars(req)
	key := "/" + vars["key"]

	// Help client to redirect the request to the current leader
	if req.FormValue("consistent") == "true" && s.State() != raft.Leader {
		leader := s.Leader()
		hostname, _ := s.ClientURL(leader)

		url, err := url.Parse(hostname)
		if err != nil {
			log.Warn("Redirect cannot parse hostName ", hostname)
			return err
		}
		url.RawQuery = req.URL.RawQuery
		url.Path = req.URL.Path

		log.Debugf("Redirect consistent get to %s", url.String())
		http.Redirect(w, req, url.String(), http.StatusTemporaryRedirect)
		return nil
	}

	recursive := (req.FormValue("recursive") == "true")
	sort := (req.FormValue("sorted") == "true")
	waitIndex := req.FormValue("waitIndex")
	stream := (req.FormValue("stream") == "true")

	if req.FormValue("wait") == "true" {
		return handleWatch(key, recursive, stream, waitIndex, w, s)
	}
	return handleGet(key, recursive, sort, w, s)
}
func (c *Cluster) Search(search Search) (*http.Response, error) {
	url := c.URL
	url.Path = search.Path()

	query := search.Query()
	if search.Type() != SEARCH_TYPE_SCROLL {
		query["search_type"] = []string{search.Type().String()}
	}
	url.RawQuery = query.Encode()

	var (
		req       *http.Request
		createErr error
	)
	body := search.Data()
	if body == nil {
		req, createErr = http.NewRequest("GET", url.String(), nil)
	} else {
		var buffer bytes.Buffer
		if err := json.NewEncoder(&buffer).Encode(body); err != nil {
			return nil, err
		}
		req, createErr = http.NewRequest("POST", url.String(), &buffer)
	}
	if createErr != nil {
		return nil, createErr
	}
	return (&http.Client{}).Do(req)
}
// updateURLs checks and updates any of n's attributes that are listed in tagsToAttrs.
// Any URLs found are, if they're relative, updated with the necessary changes to make
// a visit to that URL also go through the proxy.
// sourceURL is the URL of the page which we're currently on; it's required to make
// relative links work.
func (t *proxyTransport) updateURLs(n *html.Node, sourceURL *url.URL) {
	if n.Type != html.ElementNode {
		return
	}
	attrs, ok := tagsToAttrs[n.Data]
	if !ok {
		return
	}
	for i, attr := range n.Attr {
		if !attrs.Has(attr.Key) {
			continue
		}
		url, err := url.Parse(attr.Val)
		if err != nil {
			continue
		}
		// Is this URL relative?
		if url.Host == "" {
			url.Scheme = t.proxyScheme
			url.Host = t.proxyHost
			url.Path = path.Join(t.proxyPathPrepend, path.Dir(sourceURL.Path), url.Path, "/")
			n.Attr[i].Val = url.String()
		} else if url.Host == sourceURL.Host {
			url.Scheme = t.proxyScheme
			url.Host = t.proxyHost
			url.Path = path.Join(t.proxyPathPrepend, url.Path)
			n.Attr[i].Val = url.String()
		}
	}
}
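// A note on the relative-link branch above, sketched standalone with
// hypothetical values ("/proxy" stands in for proxyPathPrepend): path.Join
// cleans its result, so the trailing "/" element is dropped, and the target
// resolves against the directory of the source page.
func demoRelativeJoin() {
	joined := path.Join("/proxy", path.Dir("/ui/index.html"), "css/site.css", "/")
	fmt.Println(joined) // /proxy/ui/css/site.css
}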
func (b *Bucket) post(location string, body []byte) ([]byte, error) {
	cli := &http.Client{}
	url, err := b.CabinetUrl.Parse(location)
	if err != nil {
		return nil, fmt.Errorf("cannot parse url %s", location)
	}
	req, err := http.NewRequest("POST", url.String(), bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("cannot create request %s", url.String())
	}
	req.SetBasicAuth(Option.AdminUser, Option.AdminPass)
	resp, err := cli.Do(req)
	if err != nil {
		return nil, fmt.Errorf("error request to %s, %s", url.String(), err.Error())
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("response code not 200 OK but %d, %s", resp.StatusCode, url.String())
	}
	resp_body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("cannot read body %s", url.String())
	}
	return resp_body, nil
}
// ServerGroups returns the supported groups, with information like supported
// versions and the preferred version.
func (d *DiscoveryClient) ServerGroups() (apiGroupList *unversioned.APIGroupList, err error) {
	// Get the groupVersions exposed at /api
	url := d.baseURL
	url.Path = "/api"
	resp, err := d.get(url.String())
	if err != nil {
		return nil, err
	}
	var v unversioned.APIVersions
	defer resp.Body.Close()
	err = json.NewDecoder(resp.Body).Decode(&v)
	if err != nil {
		return nil, fmt.Errorf("unexpected error: %v", err)
	}
	apiGroup := apiVersionsToAPIGroup(&v)

	// Get the groupVersions exposed at /apis
	url.Path = "/apis"
	resp2, err := d.get(url.String())
	if err != nil {
		return nil, err
	}
	defer resp2.Body.Close()
	apiGroupList = &unversioned.APIGroupList{}
	if err = json.NewDecoder(resp2.Body).Decode(&apiGroupList); err != nil {
		return nil, fmt.Errorf("unexpected error: %v", err)
	}

	// Append the group retrieved from /api to the list.
	apiGroupList.Groups = append(apiGroupList.Groups, apiGroup)
	return apiGroupList, nil
}
// createRequest creates an instance of http.Request containing all config
// values and the URL from the current client, along with the provided body.
func createRequest(cl *Client, data io.Reader) (r *http.Request, err error) {
	url, err := createUrl(cl)
	if err != nil {
		return nil, err
	}
	var req *http.Request
	if data != nil {
		if req, err = http.NewRequest(cl.request.method, url.String(), data); err != nil {
			return nil, err
		}
	} else {
		if req, err = http.NewRequest(cl.request.method, url.String(), nil); err != nil {
			return nil, err
		}
	}
	for key, val := range cl.Headers {
		req.Header.Set(key, val)
	}
	if req.Header.Get(HeaderUserAgent) == "" {
		req.Header.Set(HeaderUserAgent, defaultUserAgentHeader)
	}
	if req.Header.Get(HeaderContentType) == "" {
		req.Header.Set(HeaderContentType, defaultContentType)
	}
	if req.Header.Get(HeaderAccept) == "" {
		req.Header.Set(HeaderAccept, defaultAcceptHeader)
	}
	return req, nil
}
func (c Client) Get(uri string) (Collection, int, error) {
	var collection Collection
	var tmp_single NodeCollection
	var tmp_multiple NodesCollection
	var nodes []Node
	var status int

	url, err := url.Parse(uri)
	if err != nil {
		return collection, status, err
	}
	fmt.Printf("Retrieving data from %+v\n", url.String())

	resp, err := http.Get(url.String())
	if err != nil {
		// resp may be nil on transport errors, so don't touch it here.
		fmt.Printf("%+v\n", err)
		return collection, status, err
	}
	defer resp.Body.Close()
	status = resp.StatusCode

	// Shock returns a different structure if data is not found.
	// Create an error if the status code is not 200.
	if status != 200 {
		collection.Error = strconv.Itoa(status)
		return collection, status, errors.New("Status not 200")
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return collection, status, err
	}
	if err = json.Unmarshal(body, &tmp_single); err != nil {
		err2 := json.Unmarshal(body, &tmp_multiple)
		if err2 != nil {
			fmt.Printf("Something very wrong. Can't create collection from json:\n1: %v \n 2: %v", err.Error(), err2.Error())
			status = 500
			return collection, status, errors.New(strings.Join([]string{err.Error(), err2.Error()}, " "))
		}
		collection.Error = tmp_multiple.Error
		collection.Status = tmp_multiple.Status
		collection.Limit = tmp_multiple.Limit
		collection.Offset = tmp_multiple.Offset
		collection.Total_count = tmp_multiple.Total_count
		collection.Data = tmp_multiple.Data
		return collection, status, nil
	}
	nodes = append(nodes, tmp_single.Data)
	collection.Error = tmp_multiple.Error
	collection.Data = nodes
	return collection, status, nil
}
func TestClientRequest(t *testing.T) {
	Convey("Client Tests", t, func() {

		Convey("->setPath()", func() {
			url := &url.URL{Host: "test"}

			Convey("should format properly", func() {
				setPath(url, "tests")
				So(url.String(), ShouldEqual, "//test/tests")
			})

			Convey("should respect an existing path", func() {
				url.Path = "admin"
				setPath(url, "test")
				So(url.String(), ShouldEqual, "//test/admin/test")
			})
		})

		Convey("->setIDPath()", func() {
			url := &url.URL{Host: "test"}

			Convey("should format an id url properly", func() {
				setIDPath(url, "tests", "1")
				So(url.String(), ShouldEqual, "//test/tests/1")
			})
		})
	})
}
// rewriteURL rewrites a single URL to go through the proxy, if the URL refers
// to the same host as sourceURL, which is the page on which the target URL
// occurred. If any error occurs (e.g. parsing), it returns targetURL.
func (t *Transport) rewriteURL(targetURL string, sourceURL *url.URL) string {
	url, err := url.Parse(targetURL)
	if err != nil {
		return targetURL
	}

	isDifferentHost := url.Host != "" && url.Host != sourceURL.Host
	isRelative := !strings.HasPrefix(url.Path, "/")
	if isDifferentHost || isRelative {
		return targetURL
	}

	url.Scheme = t.Scheme
	url.Host = t.Host
	origPath := url.Path

	// Do not rewrite URL if the sourceURL already contains the necessary prefix.
	if strings.HasPrefix(url.Path, t.PathPrepend) {
		return url.String()
	}
	url.Path = path.Join(t.PathPrepend, url.Path)
	if strings.HasSuffix(origPath, "/") {
		// Add back the trailing slash, which was stripped by path.Join().
		url.Path += "/"
	}
	return url.String()
}
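// A minimal usage sketch for rewriteURL under a hypothetical Transport
// configuration; the hosts and paths are made up purely to illustrate the
// three cases the function distinguishes.
func demoRewriteURL() {
	t := &Transport{Scheme: "https", Host: "proxy.example.com", PathPrepend: "/api/v1/proxy"}
	source, _ := url.Parse("http://backend.local/ui/")

	fmt.Println(t.rewriteURL("/static/app.css", source))     // rewritten: https://proxy.example.com/api/v1/proxy/static/app.css
	fmt.Println(t.rewriteURL("http://other.host/x", source)) // different host: returned unchanged
	fmt.Println(t.rewriteURL("img/logo.png", source))        // relative: returned unchanged
}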
func refPage(page interface{}, ref, methodName string) template.HTML {
	value := reflect.ValueOf(page)

	method := value.MethodByName(methodName)
	if method.IsValid() && method.Type().NumIn() == 1 && method.Type().NumOut() == 2 {
		result := method.Call([]reflect.Value{reflect.ValueOf(ref)})
		url, err := result[0], result[1]

		if !err.IsNil() {
			jww.ERROR.Printf("%s", err.Interface())
			return template.HTML(fmt.Sprintf("%s", err.Interface()))
		}
		if url.String() == "" {
			jww.ERROR.Printf("ref %s could not be found\n", ref)
			return template.HTML(ref)
		}
		return template.HTML(url.String())
	}

	jww.ERROR.Printf("Can only create references from Page and Node objects.")
	return template.HTML(ref)
}
func Test_ItTracksURLsItHasVisited(t *testing.T) {
	output := new(bytes.Buffer)
	mockFetcher := &MockFetcher{}
	app := NewCrawlerApp(output, mockFetcher)

	urlString := "http://www.google.com"
	expectedVisitedMap := map[string]bool{
		urlString:                 true,
		"http://www.google.com/1": true,
	}
	url, err := url.Parse(urlString)
	assert.Nil(t, err)

	mockFetcher.On("Fetch", url.String()).Return(&PageResults{
		internalURLs: map[string]bool{
			"http://www.google.com/1": true,
		},
	}, nil)
	mockFetcher.On("Fetch", url.String()+"/1").Return(&PageResults{}, nil)

	app.waitGroup.Add(1)
	app.Crawl(url, 2)
	app.waitGroup.Wait()

	assert.Equal(t, expectedVisitedMap, app.Visited)
}
func TestExtractRequestURL(t *testing.T) {
	urlTests := []struct {
		r   *http.Request
		url string
	}{
		{
			&http.Request{
				Host: "localhost",
				URL:  &url.URL{Path: "/path", RawQuery: "a=b"},
			},
			"http://localhost/path",
		},
		{
			&http.Request{
				Host: "www.myhost.com",
				URL:  &url.URL{Path: "/"},
				TLS:  &tls.ConnectionState{},
			},
			"https://www.myhost.com/",
		},
	}
	for i, ut := range urlTests {
		if url := extractRequestURL(ut.r); url.String() != ut.url {
			t.Errorf("%d. extractRequestURL() = %q; want %q", i, url.String(), ut.url)
		}
	}
}
func (v *Validator) start() {
	for {
		select {
		case url, ok := <-v.Input:
			// Stop the worker if the channel is closed
			if !ok {
				log.Println("Validator: channel closed -> stopping")
				return
			}

			ReorderAndCrop(v.config.Parsing, url)
			stringURL := url.String()
			URLProtStripped := stringURL
			if v.config.Parsing.CutProtocol {
				URLProtStripped = StripProtocol(stringURL)
			}
			//log.Println(stringURL)

			if _, ok := v.sites[URLProtStripped]; ok {
				//log.Println("Validator: Skipping ", stringURL, " - already looked up")
				v.waitGroup.Done()
				continue
			}
			// Mark as already processed
			v.sites[URLProtStripped] = struct{}{}

			// Check for robots
			if v.config.Parsing.RespectRobots && v.robots != nil {
				if !v.robots.TestAgent(url.Path, v.config.Parsing.UserAgent) {
					log.Println("Validator: Skipping ", stringURL, " - denied by robots")
					v.waitGroup.Done()
					continue
				}
			}

			if !strings.HasSuffix(strings.ToLower(url.Host), strings.ToLower(v.url.Host)) {
				log.Println("Validator: Skipping ", stringURL, " - invalid host: ", url.Host, " expected: ", v.url.Host)
				v.waitGroup.Done()
				continue
			}

			// Push to workers
			if !ShallParse(v.config.Parsing, stringURL) {
				//log.Println("Validator: Skipping ", stringURL, " - excluded from parsing")
				// Excluding from parsing does not exclude from adding to sitemap files
				v.waitGroup.Add(1)
				v.generator <- url.String()
				v.waitGroup.Done()
				continue
			}
			v.workerQueue.In() <- url
		}
	}
}
func setup() {
	mux = http.NewServeMux()
	server = httptest.NewServer(mux)

	gitter = New("abc")

	// Fake the API and Stream base URLs by using the test
	// server URL instead.
	url, _ := url.Parse(server.URL)
	gitter.config.apiBaseURL = url.String() + "/"
	gitter.config.streamBaseURL = url.String() + "/"
}
// adjustURL updates the profile source URL based on heuristics. It
// will append ?seconds=sec for CPU profiles if not already
// specified. Returns the hostname if the profile is remote.
func adjustURL(source string, sec int, ui plugin.UI) (adjusted, host string, duration time.Duration) {
	// If there is a local file with this name, just use it.
	if _, err := os.Stat(source); err == nil {
		return source, "", 0
	}
	url, err := url.Parse(source)

	// Automatically add http:// to URLs of the form hostname:port/path.
	// url.Parse treats "hostname" as the Scheme.
	if err != nil || (url.Host == "" && url.Scheme != "" && url.Scheme != "file") {
		url, err = url.Parse("http://" + source)
		if err != nil {
			return source, "", 0
		}
	}
	if scheme := strings.ToLower(url.Scheme); scheme == "" || scheme == "file" {
		url.Scheme = ""
		return url.String(), "", 0
	}

	values := url.Query()
	if urlSeconds := values.Get("seconds"); urlSeconds != "" {
		if us, err := strconv.ParseInt(urlSeconds, 10, 32); err == nil {
			if sec >= 0 {
				ui.PrintErr("Overriding -seconds for URL ", source)
			}
			sec = int(us)
		}
	}

	switch strings.ToLower(url.Path) {
	case "", "/":
		// Apply default /profilez.
		url.Path = cpuProfileHandler
	case "/protoz":
		// Rewrite to /profilez?type=proto
		url.Path = cpuProfileHandler
		values.Set("type", "proto")
	}

	if hasDuration(url.Path) {
		if sec > 0 {
			duration = time.Duration(sec) * time.Second
			values.Set("seconds", fmt.Sprintf("%d", sec))
		} else {
			// Assume default duration: 30 seconds
			duration = 30 * time.Second
		}
	}
	url.RawQuery = values.Encode()
	return url.String(), url.Host, duration
}
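// A standalone sketch of the heuristic noted in the comment above: url.Parse
// reads "host:port/path" as scheme "host", which is why adjustURL re-parses
// with an "http://" prefix when Host comes back empty. The address is
// hypothetical.
func demoSchemeHeuristic() {
	u, _ := url.Parse("localhost:8080/profilez")
	fmt.Println(u.Scheme, u.Host) // "localhost" "" — host:port mistaken for a scheme
	u, _ = url.Parse("http://localhost:8080/profilez")
	fmt.Println(u.Scheme, u.Host) // "http" "localhost:8080"
}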
func (app *App) RebuildRoutes() (*RebuildRoutesResult, error) {
	routerName, err := app.GetRouter()
	if err != nil {
		return nil, err
	}
	r, err := router.Get(routerName)
	if err != nil {
		return nil, err
	}
	err = r.AddBackend(app.Name)
	if err != nil && err != router.ErrBackendExists {
		return nil, err
	}
	for _, cname := range app.CName {
		err := r.SetCName(cname, app.Name)
		if err != nil && err != router.ErrCNameExists {
			return nil, err
		}
	}
	oldRoutes, err := r.Routes(app.GetName())
	if err != nil {
		return nil, err
	}
	expectedMap := make(map[string]*url.URL)
	for _, unit := range app.Units() {
		expectedMap[unit.Address.String()] = unit.Address
	}
	var toRemove []*url.URL
	for _, url := range oldRoutes {
		if _, isPresent := expectedMap[url.String()]; isPresent {
			delete(expectedMap, url.String())
		} else {
			toRemove = append(toRemove, url)
		}
	}
	var result RebuildRoutesResult
	for _, toAddUrl := range expectedMap {
		err := r.AddRoute(app.GetName(), toAddUrl)
		if err != nil {
			return nil, err
		}
		result.Added = append(result.Added, toAddUrl.String())
	}
	for _, toRemoveUrl := range toRemove {
		err := r.RemoveRoute(app.GetName(), toRemoveUrl)
		if err != nil {
			return nil, err
		}
		result.Removed = append(result.Removed, toRemoveUrl.String())
	}
	return &result, nil
}
func (hf HttpFetcher) GetAll(urls []string) ([]*HttpFetcherResponse, error) {
	m := metrics.GetOrRegisterTimer("fn.FetchRemoteData", nil) // TODO: update metric name
	defer m.UpdateSince(time.Now())

	resps := make([]*HttpFetcherResponse, len(urls))

	var wg sync.WaitGroup
	wg.Add(len(urls))

	// TODO: add thruput here..
	for i, urlStr := range urls {
		resps[i] = &HttpFetcherResponse{}

		// Pass urlStr as an argument so each goroutine gets its own copy
		// (the range variable is reused across iterations).
		go func(resp *HttpFetcherResponse, urlStr string) {
			defer wg.Done()

			url, err := urlx.Parse(urlStr)
			if err != nil {
				resp.Err = err
				return
			}
			resp.URL = url

			lg.Infof("Fetching %s", url.String())
			fetch, err := hf.client().Get(url.String())
			if err != nil {
				lg.Warnf("Error fetching %s because %s", url.String(), err)
				resp.Err = err
				return
			}
			defer fetch.Body.Close()

			resp.Status = fetch.StatusCode
			body, err := ioutil.ReadAll(fetch.Body)
			if err != nil {
				resp.Err = err
				return
			}
			resp.Data = body
			resp.Err = nil
		}(resps[i], urlStr)
	}

	wg.Wait()
	return resps, nil
}
func (f Fetcher) GetAll(ctx context.Context, urls []string) ([]*FetcherResponse, error) {
	defer metrics.MeasureSince([]string{"fn.FetchRemoteData"}, time.Now())

	fetches := make([]*FetcherResponse, len(urls))

	var wg sync.WaitGroup
	wg.Add(len(urls))

	// TODO: add thruput here..
	for i, urlStr := range urls {
		fetches[i] = &FetcherResponse{}

		// Pass urlStr as an argument so each goroutine gets its own copy
		// (the range variable is reused across iterations).
		go func(fetch *FetcherResponse, urlStr string) {
			defer wg.Done()

			url, err := urlx.Parse(urlStr)
			if err != nil {
				fetch.Err = err
				return
			}
			fetch.URL = url

			lg.Infof("Fetching %s", url.String())
			resp, err := ctxhttp.Get(ctx, f.client(), url.String())
			if err != nil {
				lg.Warnf("Error fetching %s because %s", url.String(), err)
				fetch.Err = err
				return
			}
			defer resp.Body.Close()

			fetch.Status = resp.StatusCode
			body, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				fetch.Err = err
				return
			}
			fetch.Data = body
			fetch.Err = nil
		}(fetches[i], urlStr)
	}

	wg.Wait()
	return fetches, nil
}
func TestGetResults(t *testing.T) {
	parseFunc = func(root *xmlpath.Node) ([]*Torrent, error) {
		return nil, nil
	}

	// Request URL
	var reqURL string

	// Fake server
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, `test`)
		reqURL = fmt.Sprintf("http://%s%s", r.Host, r.URL.String())
	}))
	defer ts.Close()

	// Client
	c := New()
	c.Endpoint = ts.URL

	type mock struct {
		f           func(q *Query) ([]*Torrent, error)
		baseURLFunc func(q *Query) string
	}
	for _, m := range []mock{
		{
			f:           c.ListByUser,
			baseURLFunc: c.listByUserBaseURL,
		},
		{
			f:           c.Search,
			baseURLFunc: c.searchBaseURL,
		},
	} {
		_, err := m.f(mockQuery)
		if err != nil {
			t.Fatalf("expected no error got %q", err)
		}
		url, err := url.Parse(fmt.Sprintf("%s/%s", m.baseURLFunc(mockQuery), mockQuery.urlParams(1)))
		if err != nil {
			t.Fatalf("failed to parse URL: %q", err)
		}
		if reqURL != url.String() {
			t.Errorf("invalid search url: expected %q, got %q", url.String(), reqURL)
		}
	}
}
func probeGitUrl(u *url.URL, insecure bool, schemes []string) (string, error) {
	git := func(url *url.URL) error {
		fmt.Println("URL:", url, url.String())
		out, err := run("git", "ls-remote", url.String(), "HEAD")
		if err != nil {
			return err
		}
		if !bytes.Contains(out, []byte("HEAD")) {
			return fmt.Errorf("not a git repo")
		}
		return nil
	}
	return probe(git, u, insecure, schemes...)
}
func TestClient_URL(t *testing.T) {
	tests := []struct {
		base      string
		route     string
		routeVars map[string]string
		opt       interface{}
		exp       string
	}{{
		base:      "https://sourcegraph.com/api/",
		route:     router.Repo,
		routeVars: map[string]string{"RepoSpec": "github.com/gorilla/mux"},
		exp:       "https://sourcegraph.com/api/repos/github.com/gorilla/mux",
	}, {
		base:      "https://sourcegraph.com/api",
		route:     router.Repo,
		routeVars: map[string]string{"RepoSpec": "github.com/gorilla/mux"},
		exp:       "https://sourcegraph.com/api/repos/github.com/gorilla/mux",
	}, {
		base:      "http://localhost:3000/api/",
		route:     router.Repo,
		routeVars: map[string]string{"RepoSpec": "github.com/gorilla/mux"},
		exp:       "http://localhost:3000/api/repos/github.com/gorilla/mux",
	}, {
		base:      "http://localhost:3000/api",
		route:     router.Repo,
		routeVars: map[string]string{"RepoSpec": "github.com/gorilla/mux"},
		exp:       "http://localhost:3000/api/repos/github.com/gorilla/mux",
	}}

	for _, test := range tests {
		func() {
			c := NewClient(nil)
			baseURL, err := url.Parse(test.base)
			if err != nil {
				t.Fatal(err)
			}
			c.BaseURL = baseURL

			url, err := c.URL(test.route, test.routeVars, test.opt)
			if err != nil {
				t.Errorf("Error generating URL: %s", err)
				return
			}
			if url.String() != test.exp {
				t.Errorf("Expected %s, got %s on test case %+v", test.exp, url.String(), test)
				return
			}
		}()
	}
}
func (dbConfig *DbConfig) setup(name string) error {
	dbConfig.name = name
	if dbConfig.Bucket == nil {
		dbConfig.Bucket = &dbConfig.name
	}
	if dbConfig.Server == nil {
		dbConfig.Server = &DefaultServer
	}
	if dbConfig.Pool == nil {
		dbConfig.Pool = &DefaultPool
	}

	url, err := url.Parse(*dbConfig.Server)
	if err == nil && url.User != nil {
		// Remove credentials from URL and put them into the DbConfig.Username and .Password:
		if dbConfig.Username == "" {
			dbConfig.Username = url.User.Username()
		}
		if dbConfig.Password == "" {
			if password, exists := url.User.Password(); exists {
				dbConfig.Password = password
			}
		}
		url.User = nil
		urlStr := url.String()
		dbConfig.Server = &urlStr
	}

	if dbConfig.Shadow != nil {
		// Note: the url variable shadows the package name here, so this calls
		// (*url.URL).Parse, resolving the shadow server relative to the URL
		// above; for an absolute URL the result matches a standalone parse.
		url, err = url.Parse(*dbConfig.Shadow.Server)
		if err == nil && url.User != nil {
			// Remove credentials from shadow URL and put them into the DbConfig.Shadow.Username and .Password:
			if dbConfig.Shadow.Username == "" {
				dbConfig.Shadow.Username = url.User.Username()
			}
			if dbConfig.Shadow.Password == "" {
				if password, exists := url.User.Password(); exists {
					dbConfig.Shadow.Password = password
				}
			}
			url.User = nil
			urlStr := url.String()
			dbConfig.Shadow.Server = &urlStr
		}
	}

	return err
}
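// A minimal standalone sketch of the credential-stripping step above, with a
// hypothetical server URL: reading Username()/Password() first and then
// setting User to nil removes "user:pass@" from the string form.
func demoStripCredentials() {
	u, _ := url.Parse("http://bucketuser:secret@couchbase.local:8091")
	user := u.User.Username()
	pass, _ := u.User.Password()
	u.User = nil
	fmt.Println(user, pass, u.String()) // bucketuser secret http://couchbase.local:8091
}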
// Adds common parameters to the "params" map, signs the request,
// adds the signature to the "params" map and sends the request
// to the server. It then unmarshals the response into the "resp"
// parameter using xml.Unmarshal().
func (mt *MTurk) query(params map[string]string, operation string, resp interface{}) error {
	service := "AWSMechanicalTurkRequester"
	timestamp := time.Now().UTC().Format("2006-01-02T15:04:05Z")

	params["AWSAccessKeyId"] = mt.Auth.AccessKey
	params["Service"] = service
	params["Timestamp"] = timestamp
	params["Operation"] = operation

	// Make a copy so the shared base URL is not mutated.
	url := *mt.URL

	sign(mt.Auth, service, operation, timestamp, params)
	url.RawQuery = multimap(params).Encode()

	r, err := http.Get(url.String())
	if err != nil {
		return err
	}
	//dump, _ := httputil.DumpResponse(r, true)
	//println("DUMP:\n", string(dump))
	if r.StatusCode != 200 {
		return fmt.Errorf("%d: unexpected status code", r.StatusCode)
	}
	dec := xml.NewDecoder(r.Body)
	err = dec.Decode(resp)
	r.Body.Close()
	return err
}
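// Why query copies the URL by value (url := *mt.URL), sketched standalone
// with a made-up base URL: mutating RawQuery on the copy leaves the shared
// base untouched. A shallow copy suffices because RawQuery is a plain string.
func demoURLValueCopy() {
	base, _ := url.Parse("https://mechanicalturk.amazonaws.com/")
	u := *base
	u.RawQuery = "Operation=SearchHITs"
	fmt.Println(base.String()) // https://mechanicalturk.amazonaws.com/ (unchanged)
	fmt.Println(u.String())    // https://mechanicalturk.amazonaws.com/?Operation=SearchHITs
}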
// Fetch loads the page at the uri address and returns a WebPage with fields
// filled with its data.
//
// It parses the html to get the <title> and <a> tags, not keeping the links
// to the address itself, #, javascript: or `^(https?)://`.
// If it can't find any <title> tag, it uses the basename of the address as title.
// Fetch returns an error if it can't get to the url.
// If not provided, the address scheme defaults to https.
func (w *WebPage) Fetch(address string) error {
	// Check address
	if address == "" {
		return fmt.Errorf("crawler: Fetch() - address is empty")
	}
	url, err := url.Parse(address)
	if err != nil {
		return err
	}
	// If not provided, scheme defaults to https
	if url.Scheme == "" {
		url.Scheme = "https"
	}

	// Get the body
	resp, err := http.Get(url.String())
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	dom, err := parse(resp.Body)
	if err != nil {
		// Something really bad happened: by design, all errors returned by
		// html.Parse are bugs. See
		// https://groups.google.com/forum/#!topic/golang-nuts/wysIRCbBMwU
		// for more information.
		return err
	}
	w.dom = dom
	w.parseDom()
	return nil
}
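// The scheme-defaulting step above, sketched standalone with a hypothetical
// address. For an input with no scheme, url.Parse puts everything in Path;
// setting Scheme afterwards still yields a string that http.Get re-parses
// with the expected host.
func demoDefaultScheme() {
	u, _ := url.Parse("example.com/page")
	fmt.Println(u.Host, u.Path) // "" "example.com/page"
	u.Scheme = "https"
	fmt.Println(u.String()) // https://example.com/page
}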
func (s *toolsSuite) TestUploadRejectsWrongModelUUIDPath(c *gc.C) {
	// Check that we cannot access the tools at https://host:port/BADModelUUID/tools
	url := s.toolsURL(c, "")
	url.Path = "/model/dead-beef-123456/tools"
	resp := s.authRequest(c, httpRequestParams{method: "POST", url: url.String()})
	s.assertErrorResponse(c, resp, http.StatusNotFound, `unknown model: "dead-beef-123456"`)
}
func (hs *HsAPI) makeRequest(endpoint string, keyparam string, params url.Values) ([]byte, error) {
	url, err := url.Parse(BaseURL)
	if err != nil {
		return nil, err
	}
	if keyparam != "" {
		url.Path += fmt.Sprintf(endpoint, keyparam)
	} else {
		url.Path += endpoint
	}
	url.RawQuery = params.Encode()

	req, err := http.NewRequest("GET", url.String(), nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("X-Mashape-Key", hs.token)
	req.Header.Set("Accept", "application/json")

	resp, err := hs.client.Do(req)
	if err != nil {
		// Don't touch resp before this check; it is nil on transport errors.
		return nil, err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if hs.Debug {
		log.Println(url)
		log.Println(string(body))
	}
	if resp.StatusCode == http.StatusOK {
		return body, nil
	}
	return nil, fmt.Errorf("unexpected status %s", resp.Status)
}
// GetSized downloads the named meta file with the given size. A short body
// is acceptable because in the case of timestamp.json, the size is a cap,
// not an exact length.
// If size is "NoSizeLimit", this corresponds to "infinite," but we cut off at a
// predefined threshold "notary.MaxDownloadSize".
func (s HTTPStore) GetSized(name string, size int64) ([]byte, error) {
	url, err := s.buildMetaURL(name)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("GET", url.String(), nil)
	if err != nil {
		return nil, err
	}
	resp, err := s.roundTrip.RoundTrip(req)
	if err != nil {
		return nil, NetworkError{Wrapped: err}
	}
	defer resp.Body.Close()
	if err := translateStatusToError(resp, name); err != nil {
		logrus.Debugf("received HTTP status %d when requesting %s.", resp.StatusCode, name)
		return nil, err
	}
	if size == NoSizeLimit {
		size = notary.MaxDownloadSize
	}
	if resp.ContentLength > size {
		return nil, ErrMaliciousServer{}
	}
	logrus.Debugf("%d when retrieving metadata for %s", resp.StatusCode, name)
	b := io.LimitReader(resp.Body, size)
	body, err := ioutil.ReadAll(b)
	if err != nil {
		return nil, err
	}
	return body, nil
}
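// How the size cap above behaves, sketched standalone: io.LimitReader stops
// after size bytes, so a short body reads fully while an oversized one is
// truncated (the ContentLength check catches honest servers; the reader
// limit guards against lying ones).
func demoLimitReader() {
	body := strings.NewReader("0123456789")
	b, _ := ioutil.ReadAll(io.LimitReader(body, 4))
	fmt.Println(string(b)) // "0123"
}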