func main() {
	page_count := 1
	download_count := 0
	for {
		offset := page_count * 50
		param := map[string]string{
			"AppId":        appid,
			"Version":      "2.2",
			"Market":       "ja-JP",
			"Sources":      "Image",
			"Image.Count":  strconv.Itoa(50),
			"Image.Offset": strconv.Itoa(offset),
			"Adult":        "off",
			"Query":        "おっぱい",
		}
		var sr *json_root
		res, err := http.Get(get_request_uri(param))
		if err != nil {
			break
		}
		reader := bufio.NewReader(res.Body)
		line, err := reader.ReadBytes('\n')
		// Bug fix: the original broke out of the loop on err == nil, i.e. on a
		// successful read. Only a real failure should stop the loop; io.EOF still
		// carries the final unterminated line.
		if err != nil && err != io.EOF {
			break
		}
		json.Unmarshal(line, &sr)
		for i := 0; i < len(sr.SearchResponse.Image.Results); i++ {
			result := sr.SearchResponse.Image.Results[i]
			if regexp.MustCompile(".jpg$").FindString(result.MediaUrl) == "" {
				continue
			}
			download_count++
			filename := md5hex(encode_utf8(result.MediaUrl)) + ".jpg"
			filepath := dir + filename
			if _, err := os.Stat(filepath); err == nil {
				continue // already downloaded
			}
			fmt.Printf("%d : Download... %s\n", download_count, result.MediaUrl)
			res, err := http.Get(result.MediaUrl)
			if err != nil {
				runtime.Goexit()
			}
			data, err := ioutil.ReadAll(res.Body)
			if err != nil {
				runtime.Goexit()
			}
			if regexp.MustCompile("^image").FindString(http.DetectContentType(data)) != "" {
				ioutil.WriteFile(filepath, data, 0666)
			}
		}
		page_count++
	}
}
func main() {
	// This works
	r, _, err := http.Get("https://www.google.com")
	if err != nil {
		log.Exit(err)
	}
	log.Println(r)

	// This doesn't
	r, _, err = http.Get("https://streaming.campfirenow.com")
	if err != nil {
		log.Exit(err)
	}
	log.Println(r)
}
func testQuestions() {
	r, _, err := http.Get("http://api.stackoverflow.com/1.0/questions?key=change_me&answers=true")
	if err != nil {
		fmt.Println(err.String())
		return
	}
	d, err := gzip.NewReader(r.Body)
	if err != nil {
		fmt.Println(err.String())
		return
	}
	var qr gostack.QuestionsResult
	b, _ := ioutil.ReadAll(d)
	err = json.Unmarshal(b, &qr)
	if err != nil {
		fmt.Println(err.String())
		return
	}
	fmt.Println(qr)
}
func (req *Googl) get(url string, params ...map[string]string) (string, os.Error) {
	if !strings.Contains(url, "://goo.gl/") {
		return "", os.EINVAL
	}
	req_url := GOOGL_V1 + "?shortUrl=" + url
	if 0 < len(params) {
		for i := 0; i < len(params); i++ {
			req_url += "&" + toQuery(params[i])
		}
	}
	if "" != req.Key {
		req_url += "&key=" + req.Key
	}
	res, _, err := http.Get(req_url)
	if err != nil {
		return "", os.Error(err)
	}
	// Bug fix: defer only after the error check; a failed Get returns a nil
	// response, and the original deferred res.Body.Close() before checking err.
	defer res.Body.Close()
	body, _ := ioutil.ReadAll(res.Body)
	return string(body), nil
}
func (s *TestHTTPServer) Start() {
	if s.started {
		return
	}
	s.started = true
	s.request = make(chan *http.Request, 64)
	s.response = make(chan *testResponse, 64)
	s.pending = make(chan bool, 64)
	url, _ := url.Parse(s.URL)
	go http.ListenAndServe(url.Host, s)
	s.PrepareResponse(202, nil, "Nothing.")
	fmt.Fprintf(os.Stderr, "\nWaiting for the fake server to be up...")
	for {
		resp, err := http.Get(s.URL)
		if err == nil && resp.StatusCode == 202 {
			break
		}
		time.Sleep(1e8)
	}
	fmt.Fprintf(os.Stderr, "Done\n")
	s.WaitRequest()
}
func DiscoverXml(id string) (*string, os.Error) {
	resp, _, err := http.Get(id)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	parser := xml.NewParser(resp.Body)
	inURI := false
	for {
		t, err := parser.Token()
		if err != nil {
			return nil, err
		}
		switch tt := t.(type) {
		case xml.StartElement:
			if tt.Name.Local == "URI" {
				inURI = true
			}
		case xml.CharData:
			if inURI {
				s := string([]byte(tt))
				return &s, nil
			}
		}
	}
	return nil, &DiscoveryError{str: "URI not found"}
}
func (c *Client) StatusesRetweets(id uint64, count int) (t []Tweet) {
	var params string
	var tweets []Tweet
	if id == 0 {
		return nil
	}
	if count != 0 {
		params = addParam(params, "count", fmt.Sprintf("%d", count))
	}
	url := c.makeAuthURL(statusesRetweets+fmt.Sprintf("/%d", id), params)
	res, _, err := http.Get(url)
	if err != nil {
		return nil
	}
	if res.Status != "200 OK" {
		return nil
	}
	// A NUL byte never appears in the JSON body, so this reads to EOF.
	reader := bufio.NewReader(res.Body)
	line, _ := reader.ReadString(0)
	print(line + "\n")
	json.Unmarshal([]byte(line), &tweets) // Unmarshal takes []byte, not string
	return tweets
}
func (sns *SNS) query(topic *Topic, message *Message, params map[string]string, resp interface{}) os.Error {
	params["Timestamp"] = time.UTC().Format(time.RFC3339)
	url_, err := url.Parse(sns.Region.SNSEndpoint)
	if err != nil {
		return err
	}
	sign(sns.Auth, "GET", "/", params, url_.Host)
	url_.RawQuery = multimap(params).Encode()
	r, err := http.Get(url_.String())
	if err != nil {
		return err
	}
	defer r.Body.Close()
	//dump, _ := http.DumpResponse(r, true)
	//println("DUMP:\n", string(dump))
	//return nil
	if r.StatusCode != 200 {
		return buildError(r)
	}
	err = xml.Unmarshal(r.Body, resp)
	return err
}
// scrape scrapes the given URL and saves it to disk
func scrape(url string, done chan bool) {
	// Notify main when we're done right after we return
	defer func() { done <- true }() // Anonymous functions ftw!

	fmt.Printf("Scraping %s...\n", url)
	defer fmt.Printf("Finished scraping %s\n", url)

	// Don't make the user type "http://" for every freaking URL!
	if !strings.Contains(url, "://") {
		url = "http://" + url
	}

	// Download website contents
	req, err := http.Get(url)
	if somethingBroke(err) {
		return
	}

	// Save contents to variable
	contents, err := ioutil.ReadAll(req.Body)
	defer req.Body.Close()
	if somethingBroke(err) {
		return
	}

	// Write contents to disk. TODO: Store URL, text data in a DB
	url = strings.Replace(url, "/", "___", -1)
	filename := fmt.Sprintf("%v-%v", url, time.Seconds())
	err = ioutil.WriteFile(SCRAPES_DIR+filename, contents, 0644)
	if somethingBroke(err) {
		return
	}
}
func fetchUrlToFile(url, filename string, expectedSize int64) bool {
	fi, statErr := os.Stat(filename)
	if statErr == nil && (expectedSize == -1 && fi.Size > 0 || expectedSize == fi.Size) {
		// TODO: re-fetch mode?
		return true
	}
	netop := NewNetworkOperation()
	defer netop.Done()
	res, _, err := http.Get(url)
	if err != nil {
		addError(fmt.Sprintf("Error fetching %s: %v", url, err))
		return false
	}
	defer res.Body.Close()
	fileBytes, err := ioutil.ReadAll(res.Body)
	if err != nil {
		addError(fmt.Sprintf("Error reading XML from %s: %v", url, err))
		return false
	}
	err = ioutil.WriteFile(filename, fileBytes, 0600)
	if err != nil {
		addError(fmt.Sprintf("Error writing file %s: %v", filename, err))
		return false
	}
	return true
}
func randomQuote() string {
	// Simulates slow connection; shows off `defer`, goroutine asynchronicity
	defer func() { time.Sleep(2e9) }()

	// Scrape site
	url := "http://subfusion.net/cgi-bin/quote.pl?quote=cookie&number=1"
	req, err := http.Get(url)
	checkError(err)

	// Create regex
	re, err := regexp.Compile("<body><br><br><b><hr><br>(.*)<br><br><hr><br>")
	checkError(err)

	// Read HTML
	html, err := ioutil.ReadAll(req.Body)
	req.Body.Close()
	checkError(err)

	// Parse out quote
	quote := re.FindString(string(html))
	quote = strings.Replace(quote, "<body><br><br><b><hr><br>", "", -1)
	quote = strings.Replace(quote, "<br><br><hr><br>", "", -1)
	return quote
}
func tf(c *http.Conn, b string, e string, n int, y int, s string) int {
	var qs string
	var ntf int = 0
	if len(s) < 1 {
		return ntf
	}
	if s[0] == '#' && len(s) > 1 {
		qs = s
	} else {
		qs = "from:" + s
	}
	r, _, err := http.Get(fmt.Sprintf(queryURI, qformat, http.URLEscape(qs), b, e, n))
	if err == nil {
		if r.StatusCode == http.StatusOK {
			ntf = readjson(c, r.Body, b, e, y)
		} else {
			fmt.Printf("Twitter is unable to search for %s (%s)\n", s, r.Status)
		}
		r.Body.Close()
	} else {
		fmt.Printf("%v\n", err)
	}
	return ntf
}
func Crawl(url string) (result string, finalUrl string, err os.Error) {
	resp, finalUrl, err := http.Get(url)
	if err == nil {
		fmt.Println("Status: " + resp.Status)
		fmt.Printf("Close: %t\n", resp.Close) // %t formats a bool; %b is for numbers
		body := resp.Body
		defer body.Close()
		buf := make([]byte, BufSize)
		var n int
		for {
			n, err = body.Read(buf)
			if n != 0 {
				result += string(buf[0:n])
			}
			if err != nil {
				break
			}
		}
	}
	if err == os.EOF {
		err = nil
	}
	return result, finalUrl, err
}
func parseChat(msg string, m *ircbot.Message) (reply string) {
	if config.Ignores[m.GetSender()] {
		return ""
	}
	if msg == "!help acm-bot" {
		return help(nil)
	}
	if matches := urlRegex.FindAllStringSubmatch(msg, -1); matches != nil {
		for _, m := range matches {
			response, finalURL, err := http.Get(m[0])
			if err != nil {
				errors.Printf("%s - Fetch failed: %s\n", m[0], err.String())
			} else if finalURL != m[0] || config.TitleWhitelist[m[1]] {
				if t := getTitle(response.Body); t != "" {
					info.Println("Fetched: " + m[0])
					reply += fmt.Sprintf("Title:%s\n", t)
				}
			}
		}
	}
	recordSighting(m)
	return
}
func testBadges() {
	r, _, err := http.Get("http://api.stackoverflow.com/1.0/badges?key=change_me")
	if err != nil {
		fmt.Println(err.String())
		return
	}
	d, err := gzip.NewReader(r.Body)
	if err != nil {
		fmt.Println(err.String())
		return
	}
	var br gostack.BadgesResult
	b, _ := ioutil.ReadAll(d)
	err = json.Unmarshal(b, &br)
	if err != nil {
		fmt.Println(err.String())
		return
	}
	fmt.Println(br)
}
func GetChunk(chunkX, chunkY int) (*ChunkInfo, os.Error) {
	address := fmt.Sprintf("http://%s:%s/a/r?cy=%d&cx=%d", *ip, *port, chunkY, chunkX)
	fmt.Println("Making request " + address)
	r, _, err := http.Get(address)
	if err != nil {
		log.Panic(err)
	}
	defer r.Body.Close()
	content, err := ioutil.ReadAll(r.Body)
	if err != nil {
		log.Panic(err)
	}
	info := &ChunkInfo{chunkX, chunkY, &ChunkData{}}
	err = json.Unmarshal(content, &info.Data)
	if err != nil {
		fmt.Println(err.String())
	}
	// Normalize empty cells to "0"
	for y := 0; y < 16; y++ {
		for x := 0; x < 16; x++ {
			if len(info.Data[y][x]) == 0 {
				info.Data[y][x] = append(info.Data[y][x], "0")
			} else if info.Data[y][x][0] == "" {
				info.Data[y][x][0] = "0"
			}
		}
	}
	fmt.Println(info.Data)
	return info, err
}
func (site *Site) GetRooms() {
	url := site.CampfireUrl("/rooms.xml")
	parsed_url := ParseURL(url)
	//fmt.Printf("Going to request URL: %s\n", parsedUrl.String())
	response, err := http.Get(parsed_url.String())
	if err != nil {
		log.Fatal(err)
	}
	defer response.Body.Close()
	if response.StatusCode != 200 {
		log.Printf("Status: %s\n", response.Status)
		log.Fatal("Could not list rooms")
	}
	parser := xml.NewParser(response.Body)
	rooms := Rooms{Room: nil}
	err = parser.Unmarshal(&rooms, nil)
	if err != nil {
		log.Fatal("Error unmarshalling xml:", err)
	}
	site.Rooms = rooms.Room
	fmt.Println("Rooms", rooms)
}
// Adds common parameters to the "params" map, signs the request,
// adds the signature to the "params" map and sends the request
// to the server. It then unmarshals the response into the "resp"
// parameter using xml.Unmarshal()
func (mt *MTurk) query(params map[string]string, operation string, resp interface{}) os.Error {
	service := MTURK_SERVICE
	timestamp := time.UTC().Format(TIMESTAMP_FORMAT)
	params["AWSAccessKeyId"] = mt.Auth.AccessKey
	params["Service"] = service
	params["Timestamp"] = timestamp
	params["Operation"] = operation

	// make a copy
	url := *mt.URL

	sign(mt.Auth, service, operation, timestamp, params)
	url.RawQuery = multimap(params).Encode()
	r, err := http.Get(url.String())
	if err != nil {
		return err
	}
	dump, _ := http.DumpResponse(r, true)
	println("DUMP:\n", string(dump))
	if r.StatusCode != 200 {
		return os.NewError(fmt.Sprintf("%d: unexpected status code", r.StatusCode))
	}
	err = xml.Unmarshal(r.Body, resp)
	r.Body.Close()
	return err
}
func getDcbValue(period int) (string, os.Error) {
	res, _, err := http.Get(fmt.Sprintf("http://kaijiang.zhcw.com/zhcw/html/ssq/detail_%d.html", period))
	if err != nil {
		return "", err
	}
	defer res.Body.Close()
	js, _ := ioutil.ReadAll(res.Body)
	s := string(js)
	i := strings.Index(s, "中奖号码") // "winning numbers"
	if i != -1 {
		s := string(s[i:])
		b := strings.Index(s, `<li`)
		e := strings.Index(s, `</ul>`)
		if b > e {
			return "", os.NewError("parse err\n")
		}
		// Wrap the scraped <li> list so it parses as a standalone XML document
		xmlStr := string(s[b : e-1])
		var l Xml
		xmlStr = `<?xml version="1.0" encoding="UTF-8"?><a xmlns="http://domain">` + xmlStr + "</a>"
		fmt.Printf("%s\n", xmlStr)
		if err := xml.Unmarshal(strings.NewReader(xmlStr), &l); err != nil {
			return "", err
		} else {
			var str string
			for _, v := range l.Li {
				str += fmt.Sprintf("%2d|", v)
			}
			return str, nil
		}
	} else {
		return "", os.NewError("404\n")
	}
	return "", nil
}
func runTest(test testRecord, j int, webaddr string, t *testing.T) {
	var response *http.Response
	var err os.Error
	defer func() { done <- j }()
	url := "http://" + webaddr + test.URL
	if response, _, err = http.Get(url); err != nil {
		t.Error(err)
	}
	if response.StatusCode != test.StatusCode {
		t.Error(j, webaddr, test.URL, "Response had wrong status code:", response.StatusCode)
	}
	if len(test.BodyPrefix) > 0 {
		prefix := make([]byte, len(test.BodyPrefix))
		if n, err := response.Body.Read(prefix); err == nil {
			p := string(prefix[0:n])
			if p != test.BodyPrefix {
				t.Error(j, webaddr, test.URL, "Bad body, expected prefix:", test.BodyPrefix, "got:", p)
			}
		} else {
			t.Error(j, webaddr, test.URL, "Error reading response.Body:", err)
		}
	}
	if test.Headers != nil {
		for _, hdr := range test.Headers {
			if v := response.GetHeader(hdr.Key); v != hdr.Val {
				t.Error(j, webaddr, test.URL, "Header value in response:", strconv.Quote(v), "did not match", strconv.Quote(hdr.Val))
			}
		}
	}
}
func (c *Client) RetweetedOfMe(sinceId uint64, maxId uint64, count uint, page uint) (t []Tweet) {
	var params string
	var tweets []Tweet
	if sinceId != 0 {
		params = addParam(params, "since_id", fmt.Sprintf("%d", sinceId))
	}
	if maxId != 0 {
		params = addParam(params, "max_id", fmt.Sprintf("%d", maxId))
	}
	if count != 0 {
		params = addParam(params, "count", fmt.Sprintf("%d", count))
	}
	if page != 0 {
		params = addParam(params, "page", fmt.Sprintf("%d", page))
	}
	url := c.makeAuthURL(retweetedOfMe, params)
	res, _, err := http.Get(url)
	if err != nil {
		return nil
	}
	if res.Status != "200 OK" {
		return nil
	}
	reader := bufio.NewReader(res.Body)
	line, _ := reader.ReadString(0)
	json.Unmarshal([]byte(line), &tweets) // Unmarshal takes []byte, not string
	return tweets
}
func BenchmarkStaticFileOverHTTPWithMultiplex(b *testing.B) {
	b.StopTimer()
	var C = 50 // number of simultaneous clients
	http.Handle("/static/", http.FileServer("_test/", "/static"))
	if err := createStaticTestFile(); err != nil {
		log.Print("Failed to create test file:", err)
		return
	}
	weblisten, err := net.Listen("tcp", ":0")
	if err != nil {
		log.Print("net.Listen error:", err)
		return
	}
	url := "http://" + weblisten.Addr().String() + "/static/fcgi_test.html"
	go http.Serve(weblisten, nil)

	// allow this many simultaneous connections to the webserver
	start := make(chan bool, C)
	for i := 0; i < C; i++ {
		start <- true
	}
	done := make(chan bool, b.N) // for syncing all the multiplex goroutines

	b.StartTimer()
	log.Print("Loop starting...", b.N)
	for i := 0; i < b.N; i++ {
		go func(index int) {
			<-start
			defer func() {
				done <- true
				start <- true
			}()
			response, _, err := http.Get(url)
			if err != nil {
				log.Print("http.Get error:", err)
			}
			if response == nil {
				log.Print("Nil response.")
				return
			}
			if response.StatusCode != 200 {
				log.Print("Bad response status:", response.StatusCode)
				return
			}
			// response is known to be non-nil here
			body, err := ioutil.ReadAll(response.Body)
			if err != nil {
				log.Print("ioutil.ReadAll error:", err)
				return
			}
			b.SetBytes(int64(len(body)))
			response.Body.Close()
		}(i)
	}
	for i := 0; i < b.N; i++ {
		<-done
	}
	weblisten.Close()
	removeStaticTestFile()
}
func (c *Client) StatusesFollowers(userId uint64, screenName string, cursor int) (u []User) {
	var params string
	var users []User
	if userId != 0 {
		params = addParam(params, "user_id", fmt.Sprintf("%d", userId))
	}
	if screenName != "" {
		params = addParam(params, "screen_name", screenName)
	}
	if cursor != 0 {
		params = addParam(params, "cursor", fmt.Sprintf("%d", cursor))
	}
	url := c.makeAuthURL(statusesFollowers, params)
	res, _, err := http.Get(url)
	if err != nil {
		return nil
	}
	if res.Status != "200 OK" {
		return nil
	}
	reader := bufio.NewReader(res.Body)
	line, _ := reader.ReadString(0)
	json.Unmarshal([]byte(line), &users) // Unmarshal takes []byte, not string
	return users
}
// home handles requests to the home page.
func home(req *web.Request) {
	token, err := credentials(req, "tok")
	if err != nil {
		homeLoggedOut(req)
		return
	}
	param := make(web.Values)
	url := "http://api.twitter.com/1/statuses/home_timeline.json"
	oauthClient.SignParam(token, "GET", url, param)
	url = url + "?" + param.FormEncodedString()
	resp, err := http.Get(url)
	if err != nil {
		req.Error(web.StatusInternalServerError, err)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		req.Error(web.StatusInternalServerError, os.NewError(fmt.Sprint("Status ", resp.StatusCode)))
		return
	}
	var d interface{}
	err = json.NewDecoder(resp.Body).Decode(&d)
	if err != nil {
		req.Error(web.StatusInternalServerError, err)
		return
	}
	homeTempl.Execute(req.Respond(web.StatusOK, web.HeaderContentType, web.ContentTypeHTML), d)
}
func DiscoverHtml(id string) (*string, os.Error) {
	resp, _, err := http.Get(id)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	tokenizer := html.NewTokenizer(resp.Body)
	for {
		tt := tokenizer.Next()
		switch tt {
		case html.ErrorToken:
			log.Println("Error: ", tokenizer.Error())
			return nil, tokenizer.Error()
		case html.StartTagToken, html.EndTagToken:
			tk := tokenizer.Token()
			if tk.Data == "link" {
				ok := false
				for _, attr := range tk.Attr {
					if attr.Key == "rel" && attr.Val == "openid2.provider" {
						log.Println(tk.String())
						ok = true
					} else if attr.Key == "href" && ok {
						return &attr.Val, nil
					}
				}
			}
		}
	}
	return nil, &DiscoveryError{str: "provider not found"}
}
//Retrieve the gist of the error logs pointed to by the URL
func getBuildLog(url string) string {
	// Bug fix: validate the URL before issuing the request, and check the
	// Get error before touching response.Body (a failed Get returns nil).
	if url == "" {
		return ""
	}
	response, err := http.Get(url)
	check(err)
	defer response.Body.Close()
	b, err := ioutil.ReadAll(response.Body)
	check(err)
	//only the last part of the log is relevant
	e := len(b) - 1
	s := 0
	if e > lastBytes {
		s = e - lastBytes
	}
	//drop anything after the error message
	res := strings.SplitN(string(b[s:e]), endOfLog, 2)
	//keep only last 200 lines
	split := strings.Split(res[0], "\n")
	e = len(split) - 1
	s = 0
	if e > 200 {
		s = e - 200
	}
	return strings.Join(split[s:e], "\n")
}
func shorten(long string) (short string) {
	key := "R_e659dbb5514e34edc3540a7c95b0041b"
	login := "******"
	long = url.QueryEscape(long)
	url_ := fmt.Sprintf("http://api.bit.ly/v3/shorten?login=%s&apiKey=%s&longUrl=%s&format=json", login, key, long)
	r, err := http.Get(url_)
	if err != nil {
		return "Error connecting to bit.ly"
	}
	// Bug fix: defer only after the error check; the original deferred
	// r.Body.Close() on a response that may be nil.
	defer r.Body.Close()
	b, err := ioutil.ReadAll(r.Body)
	if err != nil {
		return "Error reading bit.ly response"
	}
	var j map[string]interface{}
	err = json.Unmarshal(b, &j)
	if err != nil {
		return "Unable to shorten URL."
	}
	var data map[string]interface{} = j["data"].(map[string]interface{})
	return data["url"].(string)
}
func ExpandURL(shortUrl string) (expandedUrl string, err os.Error) {
	param := http.EncodeQuery(map[string][]string{"shortUrl": {shortUrl}})
	res, _, err := http.Get("https://www.googleapis.com/urlshortener/v1/url?" + param)
	if err != nil {
		return
	}
	if res.StatusCode != 200 {
		err = os.NewError("failed to post")
		return
	}
	b, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return
	}
	var decbuf bytes.Buffer
	decbuf.Write(b)
	dec := json.NewDecoder(&decbuf)
	var out map[string]interface{}
	err = dec.Decode(&out)
	if err != nil {
		return
	}
	expandedUrl = out["longUrl"].(string)
	return
}
func remoteSearch(query string) (res *http.Response, err os.Error) {
	search := "/search?f=text&q=" + url.QueryEscape(query)

	// list of addresses to try
	var addrs []string
	if *serverAddr != "" {
		// explicit server address - only try this one
		addrs = []string{*serverAddr}
	} else {
		addrs = []string{
			defaultAddr,
			"golang.org",
		}
	}

	// remote search
	for _, addr := range addrs {
		url := "http://" + addr + search
		res, err = http.Get(url)
		if err == nil && res.StatusCode == http.StatusOK {
			break
		}
	}
	if err == nil && res.StatusCode != http.StatusOK {
		err = os.NewError(res.Status)
	}
	return
}
func Fetcher(queue *list.List, limit int) chan *WikiArticle {
	articles := make(chan *WikiArticle)
	go func() {
		for i := 0; i < limit; i++ {
			for queue.Front() == nil {
				runtime.Gosched()
			}
			// url := q.PopFront()
			article := queue.Front().Value.(*WikiArticle)
			queue.Remove(queue.Front())
			r, url, e := http.Get(article.url)
			if e != nil {
				println("Failed:", article.url)
				continue
			}
			println("Fetched:", article.depth, url)
			buf := bytes.NewBufferString("")
			io.Copy(buf, r.Body)
			article.content = buf.String()
			r.Body.Close()
			articles <- article
		}
		close(articles)
	}()
	return articles
}