func main() {
	log.Println("201402191")
	client := &http.Client{}
	p, err := com.HttpGetBytes(client, "http://godoc.org/-/index", nil)
	if err != nil {
		log.Fatalf("Fail to load page: %v", err)
	}

	// Extract the package list between the <tbody> tags.
	content := string(p)
	start := strings.Index(content, "<tbody>") + 7
	end := strings.Index(content, "</tbody>")
	content = content[start:end]

	pkgs := strings.Split(content, "<tr>")[1:]
	skipUntilIndex := 9052
	endWhenIndex := 12000
	for i, name := range pkgs {
		if i < skipUntilIndex {
			continue
		} else if i == endWhenIndex {
			break
		}

		// Pull the import path out of the anchor tag.
		name = strings.TrimSpace(name)[14:]
		end := strings.Index(name, "\">")
		name = name[:end]
		log.Printf("#%d %s", i, name)

		_, err = com.HttpGet(client, "https://gowalker.org/"+name, nil)
		if err != nil {
			log.Fatalf("Fail to load page: %v", err)
		}
		// No delay between requests; raise the sleep to throttle.
		time.Sleep(0 * time.Second)
	}
}
func TestJobsRegionsPaginate(t *testing.T) {
	pg := []struct {
		from, to int
	}{
		{0, 1},
		{0, 2},
		{1, 4},
	}
	for _, page := range pg {
		for _, reg := range regionsSample {
			paginate := fmt.Sprintf("%s/jobs/regions/%s/%d/%d", ts.URL, reg.short, page.from, page.to)
			b, err := com.HttpGetBytes(client, paginate, nil)
			if err != nil {
				t.Errorf("getting regions home page %v", err)
			}
			doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(b)))
			if err != nil {
				t.Errorf("loading document %v", err)
			}
			s := doc.Find(".job-item")
			d := page.to - page.from
			if s.Length() != d {
				t.Errorf("expected %d got %d", d, s.Length())
			}
		}
	}
}
//
//
// DOCS
//
//

func TestDocsHome(t *testing.T) {
	l := fmt.Sprintf("%s/docs", ts.URL)
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting docs home %v", err)
	}
	if !bytes.Contains(b, []byte("home.md")) {
		t.Errorf("expected docs home got %s", b)
	}
}
func TestJobsRegionsHome(t *testing.T) {
	home := fmt.Sprintf("%s/jobs/regions", ts.URL)
	b, err := com.HttpGetBytes(client, home, nil)
	if err != nil {
		t.Errorf("getting regions home page %v", err)
	}
	if !bytes.Contains(b, []byte(regionsSample[1].name)) {
		t.Errorf("expected %s to contain %s", b, regionsSample[1].name)
	}
}
func getGitTree(url string) *gitTree {
	p, err := com.HttpGetBytes(&http.Client{}, url, nil)
	if err != nil {
		log.Fatal(err)
	}
	var t gitTree
	if err = json.Unmarshal(p, &t); err != nil {
		log.Fatal(err)
	}
	return &t
}
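// For reference, a minimal sketch of what the gitTree type could look like,
// assuming the URL above points at the GitHub git/trees API. This is an
// illustration only, not the package's actual definition.
type gitTree struct {
	Sha  string `json:"sha"`
	Tree []struct {
		Path string `json:"path"`
		Mode string `json:"mode"`
		Type string `json:"type"`
		Sha  string `json:"sha"`
	} `json:"tree"`
}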
func TestJobsNewGet(t *testing.T) {
	l := fmt.Sprintf("%s/jobs/new", dashPath)
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting dashboard home %v", err)
	}
	title := "<title>new job</title>"
	if !bytes.Contains(b, []byte(title)) {
		t.Errorf("expected new job page got %s", b)
	}
}
//
//
// DASHBOARD
//
//

func TestHome(t *testing.T) {
	l := fmt.Sprintf("%s/", dashPath)
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting dashboard home %v", err)
	}
	title := "<title>dashboard</title>"
	if !bytes.Contains(b, []byte(title)) {
		t.Errorf("expected dashboard page got %s", b)
	}
}
func TestSetLang(t *testing.T) {
	sw := fmt.Sprintf("%s/language/sw", ts.URL)
	b, err := com.HttpGetBytes(client, sw, nil)
	if err != nil {
		t.Errorf("getting home page %v", err)
	}

	// After switching languages, the home page should contain Swahili words.
	if !bytes.Contains(b, []byte("nyumbani")) {
		t.Errorf("expected home page to be in swahili got %s", b)
	}
}
func getGoogleVCS(client *http.Client, match map[string]string) error {
	// Scrape the HTML project page to find the VCS.
	p, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/checkout", match), nil)
	if err != nil {
		return errors.New("doc.getGoogleVCS(" + match["importPath"] + ") -> " + err.Error())
	}
	m := googleRepoRe.FindSubmatch(p)
	if m == nil {
		return com.NotFoundError{"Could not find VCS on Google Code project page."}
	}
	match["vcs"] = string(m[1])
	return nil
}
func TestGetRegister(t *testing.T) {
	l := fmt.Sprintf("%s%s", ts.URL, registerPath)
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting register page %v", err)
	}

	// The title of the page should be set to register.
	title := "<title>register</title>"
	if !bytes.Contains(b, []byte(title)) {
		t.Errorf("expected register page got %s", b)
	}
}
func getGolangRevision(client *http.Client, n *Node) error {
	match := map[string]string{}
	{
		m := golangPattern.FindStringSubmatch(n.ImportPath)
		// Use a distinct loop variable so the *Node parameter is not shadowed.
		for i, name := range golangPattern.SubexpNames() {
			if name != "" {
				match[name] = m[i]
			}
		}
		setupGoogleMatch(match)
	}
	match["repo"] = "go"

	if len(n.Value) == 0 {
		// Scrape the HTML project page to find the VCS.
		p, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/checkout", match), nil)
		if err != nil {
			return fmt.Errorf("fail to fetch page: %v", err)
		}
		m := googleRepoRe.FindSubmatch(p)
		if m == nil {
			return fmt.Errorf("cannot find VCS on Google Code project page")
		}
		match["vcs"] = string(m[1])
		n.Value = defaultTags[match["vcs"]]
	}
	match["tag"] = n.Value

	data, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/browse/?repo={subrepo}&r={tag}", match), nil)
	if err != nil {
		return fmt.Errorf("fail to get revision(%s): %v", n.ImportPath, err)
	}
	m := googleRevisionPattern.FindSubmatch(data)
	if m == nil {
		return fmt.Errorf("cannot find revision in page: %s", n.ImportPath)
	}
	n.Revision = strings.TrimPrefix(string(m[0]), `_setViewedRevision('`)
	n.ArchivePath = path.Join(setting.ArchivePath, n.ImportPath, n.Revision+".zip")
	return nil
}
func getGoogleVCS(client *http.Client, match map[string]string) error {
	// Scrape the HTML project page to find the VCS.
	p, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/checkout", match), nil)
	if err != nil {
		return fmt.Errorf("fail to fetch page: %v", err)
	}
	m := googleRepoRe.FindSubmatch(p)
	if m == nil {
		return com.NotFoundError{"Could not find VCS on Google Code project page."}
	}
	match["vcs"] = string(m[1])
	return nil
}
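// For reference, a plausible definition of googleRepoRe, consistent with how
// getGoogleVCS reads its first submatch as the VCS name from the checkout
// page. The real pattern is defined elsewhere in the package.
var googleRepoRe = regexp.MustCompile(`id="checkoutcmd">(hg|git|svn)`)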
func TestDocs(t *testing.T) {
	// with .md extension
	l := fmt.Sprintf("%s/docs/home.md", ts.URL)
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting docs home %v", err)
	}
	if !bytes.Contains(b, []byte("home.md")) {
		t.Errorf("expected docs home got %s", b)
	}

	// without .md extension
	l = fmt.Sprintf("%s/docs/home", ts.URL)
	b, err = com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting docs home %v", err)
	}
	if !bytes.Contains(b, []byte("home.md")) {
		t.Errorf("expected docs home got %s", b)
	}
}
// getValidTLDs gets and returns a list of valid TLDs.
func getValidTLDs() (validTLDs []string) {
	p, err := com.HttpGetBytes(&http.Client{}, "http://data.iana.org/TLD/tlds-alpha-by-domain.txt", nil)
	if err != nil {
		log.Fatal(err)
	}
	for _, line := range strings.Split(string(p), "\n") {
		line = strings.TrimSpace(line)
		// Skip blank lines and the comment header.
		if len(line) == 0 || line[0] == '#' {
			continue
		}
		validTLDs = append(validTLDs, "."+strings.ToLower(line))
	}
	return validTLDs
}
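// A hypothetical usage sketch (hasValidTLD is not part of the original code):
// checking whether a host name ends in one of the TLDs fetched above.
func hasValidTLD(host string, validTLDs []string) bool {
	host = strings.ToLower(host)
	for _, tld := range validTLDs {
		// Each entry already carries its leading dot, e.g. ".com".
		if strings.HasSuffix(host, tld) {
			return true
		}
	}
	return false
}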
func TestJobsHome(t *testing.T) {
	home := fmt.Sprintf("%s/jobs/", ts.URL)
	b, err := com.HttpGetBytes(client, home, nil)
	if err != nil {
		t.Errorf("getting home page %v", err)
	}
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(b)))
	if err != nil {
		t.Errorf("loading document %v", err)
	}
	s := doc.Find(".job-item")
	if s.Length() < 8 {
		t.Errorf("expected at least 8 jobs got %d", s.Length())
	}
}
func SiteFetchNewFeed(siteId int64) error {
	site, e := GetSite(siteId)
	if e != nil {
		return e
	}

	// Exit if fetched recently.
	if !site.FeedFetched.IsZero() && time.Since(site.FeedFetched) < 5*time.Minute {
		return nil
	}

	links, e := linkpreview.Articler.Index(site.Domain)
	if e != nil {
		return e
	}
	for _, link := range links {
		if fetched, e := SiteLinkFetched(link); !fetched && e == nil {
			color.Green("fetch %s", link)

			// Use a dedicated client instead of mutating http.DefaultClient,
			// which is shared process-wide.
			cl := &http.Client{Timeout: 3 * time.Second}
			bts, e := com.HttpGetBytes(cl, link, http.Header{"User-Agent": {articler.HTTPUserAgent}})
			if e != nil {
				color.Red("%s", e)
				continue
			}

			art, e := linkpreview.Articler.ParseArticle(link, bts)
			if e != nil {
				color.Red("%s", e)
				continue
			}
			art.Text = string(bts)

			sf := NewSiteFeedFromArticle(art)
			sf.SiteId = siteId
			e = SaveSiteFeed(sf)
			if e != nil {
				color.Red("%s", e)
				continue
			}
		}
	}

	site.FeedFetched = time.Now()
	return SaveSite(site)
	//x.Where("created < ? and site_id = ?", time.Now().Truncate(5*time.Minute), siteId)
}
func getGithubRevision(importPath string) (string, error) {
	data, err := com.HttpGetBytes(Client, fmt.Sprintf("https://%s/commits/master", importPath), nil)
	if err != nil {
		return "", fmt.Errorf("fetch revision page: %v", err)
	}

	i := bytes.Index(data, []byte(`btn-outline`))
	if i == -1 {
		return "", errors.New("find revision locator: not found")
	}
	data = data[i+1:]
	m := githubRevisionPattern.FindSubmatch(data)
	if m == nil {
		return "", fmt.Errorf("find revision: not found")
	}
	return strings.TrimPrefix(string(m[0]), `data-clipboard-text="`), nil
}
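// For reference, a plausible shape for githubRevisionPattern, consistent with
// the TrimPrefix of `data-clipboard-text="` above; the real pattern is defined
// elsewhere in the package.
var githubRevisionPattern = regexp.MustCompile(`data-clipboard-text="[0-9a-f]+`)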
func TestLogout(t *testing.T) {
	l := fmt.Sprintf("%s%s", ts.URL, logoutPath)
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting logout page %v", err)
	}

	// Should redirect to home page.
	title := "<title>zedlist</title>"
	if !bytes.Contains(b, []byte(title)) {
		t.Errorf("expected home page got %s", b)
	}

	// Should not contain the logout button.
	outButton := "logout"
	if bytes.Contains(b, []byte(outButton)) {
		t.Errorf("expected home page without logout button got %s", b)
	}
}
func TestPostRegister(t *testing.T) {
	l := fmt.Sprintf("%s%s", ts.URL, registerPath)
	vars := url.Values{
		"first_name":       {"geofrey"},
		"last_name":        {"enrnest"},
		"middle_name":      {"gernest"},
		"email":            {"*****@*****.**"},
		"password":         {"kilimahewa"},
		"confirm_password": {"kilimahewa"},
		"gender":           {"1"},
		"birth_date":       {"2 January, 1980"},
	}

	// Obtain the csrf_token to submit with the form.
	b, err := com.HttpGetBytes(client, l, nil)
	if err != nil {
		t.Errorf("getting register page %v", err)
	}
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(b)))
	if err != nil {
		t.Errorf("loading document %v", err)
	}
	token, ok := doc.Find("#token").Attr("value")
	if !ok {
		t.Errorf("expected csrf token to be set")
	}
	vars.Set("csrf_token", token)

	resp, err := client.PostForm(l, vars)
	if err != nil {
		t.Errorf("posting registration form %v", err)
	}
	defer resp.Body.Close()

	buf := &bytes.Buffer{}
	io.Copy(buf, resp.Body)

	// Should redirect to the login page if registration is successful.
	// The title of the page should be set to login.
	title := "<title>login</title>"
	if !bytes.Contains(buf.Bytes(), []byte(title)) {
		t.Errorf("expected login page got %s", buf)
	}
}
func TestJobsRegionsByShortName(t *testing.T) {
	regs, err := query.GetAllRegions()
	if err != nil {
		t.Errorf("retrieving regions %v", err)
	}
	for _, v := range regs {
		regHome := fmt.Sprintf("%s/jobs/regions/%s", ts.URL, v.Short)
		b, err := com.HttpGetBytes(client, regHome, nil)
		if err != nil {
			t.Errorf("getting regions home page %v", err)
		}
		doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(b)))
		if err != nil {
			t.Errorf("loading document %v", err)
		}
		s := doc.Find(".job-item")
		if s.Length() != 4 {
			t.Errorf("expected 4 got %d", s.Length())
		}
	}
}
func getGithubRevision(client *http.Client, n *Node) error {
	if len(n.Value) == 0 {
		n.Value = "master"
	}
	data, err := com.HttpGetBytes(client, fmt.Sprintf("https://%s/commits/%s", n.ImportPath, n.Value), nil)
	if err != nil {
		return fmt.Errorf("fail to get revision(%s): %v", n.ImportPath, err)
	}

	i := bytes.Index(data, []byte(`btn-outline`))
	if i == -1 {
		return fmt.Errorf("cannot find locator in page: %s", n.ImportPath)
	}
	data = data[i+1:]
	m := githubRevisionPattern.FindSubmatch(data)
	if m == nil {
		return fmt.Errorf("cannot find revision in page: %s", n.ImportPath)
	}
	n.Revision = strings.TrimPrefix(string(m[0]), `data-clipboard-text="`)
	n.ArchivePath = path.Join(setting.ArchivePath, n.ImportPath, n.Revision+".zip")
	return nil
}
func getOSCTags(client *http.Client, tagsPath string) []string {
	p, err := com.HttpGetBytes(client, tagsPath, nil)
	if err != nil {
		return nil
	}

	tags := make([]string, 1, 6)
	tags[0] = "master"

	page := string(p)
	start := strings.Index(page, "<ul class='bordered-list'>")
	if start > -1 {
		// Collect at most five tags from the list.
		m := oscTagRe.FindAllStringSubmatch(page[start:], -1)
		for i, v := range m {
			tags = append(tags, v[1])
			if i == 4 {
				break
			}
		}
	}
	return tags
}
func getBitbucketRevision(client *http.Client, n *Node) error {
	if len(n.Value) == 0 {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, fmt.Sprintf("https://api.bitbucket.org/1.0/repositories/%s",
			strings.TrimPrefix(n.ImportPath, "bitbucket.org/")), &repo); err != nil {
			return fmt.Errorf("fail to fetch page: %v", err)
		}
		n.Value = defaultTags[repo.Scm]
	}

	data, err := com.HttpGetBytes(client, fmt.Sprintf("https://%s/commits/%s", n.ImportPath, n.Value), nil)
	if err != nil {
		return fmt.Errorf("fail to get revision(%s): %v", n.ImportPath, err)
	}
	m := bitbucketRevisionPattern.FindSubmatch(data)
	if m == nil {
		return fmt.Errorf("cannot find revision in page: %s", n.ImportPath)
	}
	n.Revision = strings.TrimPrefix(string(m[0]), `data-revision="`)
	n.ArchivePath = path.Join(setting.ArchivePath, n.ImportPath, n.Revision+".zip")
	return nil
}
func TestHome(t *testing.T) {
	// Migrate; this data is going to be used by all subsequent tests.
	for _, v := range regionsSample {
		q := db.Conn.FirstOrCreate(&models.Region{}, models.Region{Name: v.name, Short: v.short})
		if q.Error != nil {
			t.Errorf("migrating regions %v", q.Error)
		}
	}
	regs, err := query.GetAllRegions()
	if err != nil {
		t.Errorf("retrieving regions %v", err)
	}
	for _, v := range regs {
		for _, job := range JobsSample {
			j := &models.Job{}
			j.Title = job.title
			j.Description = job.desc
			j.Region = *v
			q := db.Conn.Create(j)
			if q.Error != nil {
				t.Errorf("creating jobs %v", q.Error)
			}
		}
	}

	b, err := com.HttpGetBytes(client, ts.URL, nil)
	if err != nil {
		t.Errorf("getting home page %v", err)
	}

	// The home page should contain the zedlist title.
	if !bytes.Contains(b, []byte("<title>zedlist</title>")) {
		t.Errorf("expected home page got %s", b)
	}
}
// getGithubDoc downloads the repository archive from github.com.
func getGithubDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	match["cred"] = GetGithubCredentials()

	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER

			// Only get and check revision with the latest version.
			var refs []*struct {
				Ref    string
				Url    string
				Object struct {
					Sha  string
					Type string
					Url  string
				}
			}
			err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
			if err != nil {
				if strings.Contains(err.Error(), "403") {
					break
				}
				log.Warn("GET", "Fail to get revision")
				log.Warn("", err.Error())
				break
			}

			var etag string
		COMMIT_LOOP:
			for _, ref := range refs {
				switch {
				case strings.HasPrefix(ref.Ref, "refs/heads/master"):
					etag = ref.Object.Sha
					break COMMIT_LOOP
				}
			}
			if etag == nod.Revision {
				log.Log("GET Package hasn't changed: %s", nod.ImportPath)
				return nil, nil
			}
			nod.Revision = etag
		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// We use .zip here.
	// zip:     https://github.com/{owner}/{repo}/archive/{sha}.zip
	// tarball: https://github.com/{owner}/{repo}/tarball/{sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://github.com/{owner}/{repo}/archive/{sha}.zip", match), nil)
	if err != nil {
		return nil, errors.New("Fail to download Github repo -> " + err.Error())
	}

	shaName := com.Expand("{repo}-{sha}", match)
	if nod.Type == "tag" {
		shaName = strings.Replace(shaName, "-v", "-", 1)
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("github.com/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New(nod.ImportPath + " -> new zip: " + err.Error())
	}

	dirs := make([]string, 0, 5)
	// Need to add the root path because we cannot get it from the archive.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		absPath := strings.Replace(f.Name, shaName, installPath, 1)
		// Create directory before creating file.
		os.MkdirAll(path.Dir(absPath)+"/", os.ModePerm)

	compareDir:
		switch {
		case strings.HasSuffix(absPath, "/"): // Directory.
			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				for _, d := range dirs {
					if d == absPath {
						break compareDir
					}
				}
				dirs = append(dirs, absPath)
			}
		default:
			// Get file from archive.
			r, err := f.Open()
			if err != nil {
				return nil, err
			}

			fbytes := make([]byte, f.FileInfo().Size())
			_, err = io.ReadFull(r, fbytes)
			if err != nil {
				return nil, err
			}

			if err = com.WriteFile(absPath, fbytes); err != nil {
				return nil, err
			}

			// Set modify time.
			os.Chtimes(absPath, f.ModTime(), f.ModTime())
		}
	}

	var imports []string

	// Check if we need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}
	return imports, err
}
// getOSCDoc downloads the repository archive from git.oschina.net.
func getOSCDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER
		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// zip: http://{projectRoot}/repository/archive?ref={sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("http://git.oschina.net/{owner}/{repo}/repository/archive?ref={sha}", match), nil)
	if err != nil {
		return nil, errors.New("Fail to download OSChina repo -> " + err.Error())
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("git.oschina.net/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New("Fail to unzip OSChina repo -> " + err.Error())
	}

	nameLen := len(match["repo"])
	dirs := make([]string, 0, 5)
	// Need to add the root path because we cannot get it from the archive.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		fileName := f.Name[nameLen+1:]
		absPath := installPath + "/" + fileName

		if strings.HasSuffix(absPath, "/") {
			dirs = append(dirs, absPath)
			os.MkdirAll(absPath, os.ModePerm)
			continue
		}

		// Get file from archive.
		r, err := f.Open()
		if err != nil {
			return nil, errors.New("Fail to open OSChina repo -> " + err.Error())
		}

		fbytes := make([]byte, f.FileInfo().Size())
		_, err = io.ReadFull(r, fbytes)
		if err != nil {
			return nil, err
		}

		if err = com.WriteFile(absPath, fbytes); err != nil {
			return nil, err
		}
	}

	var imports []string

	// Check if we need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}
	return imports, err
}
// getBitbucketDoc downloads tarball from bitbucket.org.
func getBitbucketDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check version control.
	if m := bitbucketEtagRe.FindStringSubmatch(nod.Value); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, err
		}
		match["vcs"] = repo.Scm
	}

	if nod.Type == BRANCH {
		if len(nod.Value) == 0 {
			match["commit"] = defaultTags[match["vcs"]]
		} else {
			match["commit"] = nod.Value
		}
	}

	if nod.IsGetDeps {
		if nod.Type == COMMIT {
			tags := make(map[string]string)
			for _, nodeType := range []string{"branches", "tags"} {
				var nodes map[string]struct {
					Node string
				}
				if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
					return nil, err
				}
				for t, n := range nodes {
					tags[t] = n.Node
				}
			}

			// Check revision tag.
			var err error
			match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
			if err != nil {
				return nil, err
			}
			nod.Value = match["commit"]
		}
	} else {
		// Check download type.
		switch nod.Type {
		case TAG, COMMIT, BRANCH:
			match["commit"] = nod.Value
		default:
			return nil, errors.New("Unknown node type: " + nod.Type)
		}
	}

	// We use .tar.gz here.
	// zip    : https://bitbucket.org/{owner}/{repo}/get/{commit}.zip
	// tarball: https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz", match), nil)
	if err != nil {
		return nil, err
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("bitbucket.org/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // Auto path is the root path generated by bitbucket.org.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// In case we find a directory, usually we should not.
		if strings.HasSuffix(fn, "/") {
			continue
		}

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, "/")]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		// Create directory before creating file.
		dir := path.Dir(absPath)
		if !checkDir(dir, dirs) && !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
			dirs = append(dirs, dir)
			os.MkdirAll(dir+"/", os.ModePerm)
		}

		// Get data from archive.
		fbytes := make([]byte, h.Size)
		if _, err := io.ReadFull(tr, fbytes); err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}

		// Set modify time.
		os.Chtimes(absPath, h.AccessTime, h.ModTime)
	}

	var imports []string

	// Check if we need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}
	return imports, err
}
// getLaunchpadDoc downloads tarball from launchpad.net.
func getLaunchpadDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	if match["project"] != "" && match["series"] != "" {
		rc, err := com.HttpGet(client, com.Expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		_, isNotFound := err.(com.NotFoundError)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound:
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = com.Expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	var downloadPath string
	// Check if download with specific revision.
	if len(nod.Value) == 0 {
		downloadPath = com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match)
	} else {
		downloadPath = com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball/"+nod.Value, match)
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := com.HttpGetBytes(client, downloadPath, nil)
	if err != nil {
		return nil, err
	}

	installPath := installRepoPath + "/" + nod.ImportPath

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // Auto path is the root path generated by launchpad.net.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, match["repo"])+len(match["repo"])]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		switch {
		case h.FileInfo().IsDir(): // Directory.
			// Create directory before creating file.
			os.MkdirAll(absPath+"/", os.ModePerm)

			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				dirs = append(dirs, absPath)
			}
		case !strings.HasPrefix(fn, "."):
			// Get data from archive.
			fbytes := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, fbytes); err != nil {
				return nil, err
			}

			if err = com.WriteFile(absPath, fbytes); err != nil {
				return nil, err
			}
		}
	}

	var imports []string

	// Check if we need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}
	return imports, err
}
// getGoogleDoc downloads raw files from code.google.com.
func getGoogleDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	setupGoogleMatch(match)
	// Check version control.
	if err := getGoogleVCS(client, match); err != nil {
		return nil, errors.New("fail to get vcs " + nod.ImportPath + " : " + err.Error())
	}

	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["tag"] = defaultTags[match["vcs"]]

			// Only get and check revision with the latest version.
			p, err := com.HttpGetBytes(client, com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/?r={tag}", match), nil)
			if err != nil {
				log.Error("GET", "Fail to get revision")
				log.Error("", err.Error())
				break
			}

			if m := googleRevisionRe.FindSubmatch(p); m == nil {
				// err is nil on this path, so report the parse failure itself.
				log.Error("GET", "Fail to get revision")
				log.Error("", "cannot find revision in page")
			} else {
				etag := string(m[1])
				if etag == nod.Revision {
					log.Log("GET Package hasn't changed: %s", nod.ImportPath)
					return nil, nil
				}
				nod.Revision = etag
			}
		} else {
			match["tag"] = nod.Value
		}
	case TAG, COMMIT:
		match["tag"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	var installPath string
	projectPath := GetProjectPath(nod.ImportPath)
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		installPath = installRepoPath + "/" + projectPath + suf
	} else {
		installPath = installRepoPath + "/" + projectPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	if match["vcs"] == "svn" {
		com.ColorLog("[WARN] SVN detected, may take very long time.\n")

		rootPath := com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}", match)
		d, f := path.Split(rootPath)
		err := downloadFiles(client, match, d, installPath+"/", match["tag"], []string{f + "/"})
		if err != nil {
			return nil, errors.New("Fail to download " + nod.ImportPath + " : " + err.Error())
		}
	}

	p, err := com.HttpGetBytes(client, com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/archive/{tag}.zip", match), nil)
	if err != nil {
		return nil, errors.New("Fail to download " + nod.ImportPath + " : " + err.Error())
	}

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New(nod.ImportPath + " -> new zip: " + err.Error())
	}

	nameLen := strings.Index(r.File[0].Name, "/")
	dirPrefix := match["dir"]
	if len(dirPrefix) != 0 {
		dirPrefix = dirPrefix[1:] + "/"
	}

	dirs := make([]string, 0, 5)
	for _, f := range r.File {
		absPath := strings.Replace(f.Name, f.Name[:nameLen], installPath, 1)

		// Create directory before creating file.
		dir := path.Dir(absPath)
		if !checkDir(dir, dirs) && !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
			dirs = append(dirs, dir+"/")
			os.MkdirAll(dir+"/", os.ModePerm)
		}

		// Get file from archive.
		r, err := f.Open()
		if err != nil {
			return nil, err
		}

		fbytes := make([]byte, f.FileInfo().Size())
		_, err = io.ReadFull(r, fbytes)
		if err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}
	}

	var imports []string

	// Check if we need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}
	return imports, err
}
func downloadFiles(client *http.Client, match map[string]string, rootPath, installPath, commit string, dirs []string) error {
	suf := "?r=" + commit
	if len(commit) == 0 {
		suf = ""
	}

	for _, d := range dirs {
		p, err := com.HttpGetBytes(client, rootPath+d+suf, nil)
		if err != nil {
			return err
		}

		// Create destination directory.
		os.MkdirAll(installPath+d, os.ModePerm)

		// Get source files in current path.
		files := make([]com.RawFile, 0, 5)
		for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
			fname := strings.Split(string(m[1]), "?")[0]
			files = append(files, &rawFile{
				name:   fname,
				rawURL: rootPath + d + fname + suf,
			})
		}

		// Fetch files from VCS.
		if err := com.FetchFilesCurl(files); err != nil {
			return err
		}

		// Save files.
		for _, f := range files {
			absPath := installPath + d

			// Create directory before creating file.
			os.MkdirAll(path.Dir(absPath), os.ModePerm)

			// Write data to file.
			fw, err := os.Create(absPath + f.Name())
			if err != nil {
				return err
			}

			_, err = fw.Write(f.Data())
			fw.Close()
			if err != nil {
				return err
			}
		}
		files = nil

		// Get subdirectories.
		subdirs := make([]string, 0, 3)
		for _, m := range googleDirRe.FindAllSubmatch(p, -1) {
			dirName := strings.Split(string(m[1]), "?")[0]
			if strings.HasSuffix(dirName, "/") {
				subdirs = append(subdirs, d+dirName)
			}
		}

		err = downloadFiles(client, match, rootPath, installPath, commit, subdirs)
		if err != nil {
			return err
		}
	}
	return nil
}
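// For reference, a minimal sketch of the rawFile type built by downloadFiles
// above, assuming it satisfies the com.RawFile interface (Name, RawUrl, Data,
// SetData); the real definition lives elsewhere in the package.
type rawFile struct {
	name   string
	rawURL string
	data   []byte
}

func (f *rawFile) Name() string     { return f.name }
func (f *rawFile) RawUrl() string   { return f.rawURL }
func (f *rawFile) Data() []byte     { return f.data }
func (f *rawFile) SetData(p []byte) { f.data = p }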