func (as *ArticleService) msgWrite(isUpdate bool, article *model.Article) { data := map[string]string{ "type": fmt.Sprint(model.MESSAGE_TYPE_ARTICLE_CREATE), "author": article.User().Name, "link": article.Href(), "title": article.Title, "time": utils.TimeUnixFormat(article.CreateTime, "01/02 15:04:05"), } var body string if isUpdate { data["type"] = fmt.Sprint(model.MESSAGE_TYPE_ARTICLE_UPDATE) data["time"] = utils.TimeUnixFormat(article.UpdateTime, "01/02 15:04:05") body = com.Expand(MessageArticleUpdateTemplate, data) } else { body = com.Expand(MessageArticleCreateTemplate, data) } message := &model.Message{ UserId: article.UserId, From: model.MESSAGE_FROM_ARTICLE, FromId: article.Id, Type: model.MESSAGE_TYPE_ARTICLE_CREATE, Body: body, CreateTime: article.CreateTime, } if isUpdate { message.Type = model.MESSAGE_TYPE_ARTICLE_UPDATE } Message.Save(message) }
func (cs *CommentService) msgSave(cmt *model.Comment) { data := map[string]string{ "type": fmt.Sprint(model.MESSAGE_TYPE_COMMENT_CREATE), "time": utils.TimeUnixFormat(cmt.CreateTime, "01/02 15:04:05"), "author": cmt.Name, "site": cmt.AuthorUrl(), "body": cmt.Body, "title": cmt.FromTitle(), } message := &model.Message{ UserId: cmt.UserId, From: model.MESSAGE_FROM_COMMENT, FromId: cmt.Id, Type: model.MESSAGE_TYPE_COMMENT_CREATE, CreateTime: cmt.CreateTime, } if cmt.ParentId > 0 { if p := cmt.GetParent(); p != nil { data["parent"] = p.Name data["parent_content"] = p.Body fmt.Println("--------------", p) message.Type = model.MESSAGE_TYPE_COMMENT_REPLY message.Body = com.Expand(MessageCommentReplyTemplate, data) Message.Save(message) return } } message.Body = com.Expand(MessageCommentLeaveTemplate, data) Message.Save(message) }
func getDynamic(importPath, etag string) (pdoc *Package, err error) { match, err := fetchMeta(importPath) if err != nil { return nil, err } if match["projectRoot"] != importPath { rootMatch, err := fetchMeta(match["projectRoot"]) if err != nil { return nil, err } if rootMatch["projectRoot"] != match["projectRoot"] { return nil, errors.New("Project root mismatch") } } pdoc, err = getStatic(com.Expand("{repo}{dir}", match), etag) // if err == ErrNoServiceMatch { // pdoc, err = getVCSDoc(client, match, etag) // } if err != nil { return nil, err } if pdoc != nil { pdoc.ImportPath = importPath pdoc.ProjectPath = importPath // pdoc.ProjectName = match["projectName"] } return pdoc, err }
// earlyResponseForGoGetMeta responses appropriate go-get meta with status 200 // if user does not have actual access to the requested repository, // or the owner or repository does not exist at all. // This is particular a workaround for "go get" command which does not respect // .netrc file. func earlyResponseForGoGetMeta(ctx *Context) { ctx.PlainText(200, []byte(com.Expand(`<meta name="go-import" content="{GoGetImport} git {CloneLink}">`, map[string]string{ "GoGetImport": composeGoGetImport(ctx.Params(":username"), ctx.Params(":reponame")), "CloneLink": models.ComposeHTTPSCloneURL(ctx.Params(":username"), ctx.Params(":reponame")), }))) }
func (as *ArticleService) msgDelete(id int64) { article := new(model.Article) if _, err := core.Db.Where("id = ?", id).Get(article); err != nil { return } if article == nil || article.Id != id { return } data := map[string]string{ "type": fmt.Sprint(model.MESSAGE_TYPE_ARTICLE_REMOVE), "author": article.User().Name, "title": article.Title, "time": utils.TimeUnixFormat(article.CreateTime, "01/02 15:04:05"), } body := com.Expand(MessageArticleRemoveTemplate, data) message := &model.Message{ UserId: article.UserId, From: model.MESSAGE_FROM_ARTICLE, FromId: article.Id, Type: model.MESSAGE_TYPE_ARTICLE_REMOVE, Body: body, CreateTime: article.CreateTime, } Message.Save(message) }
func getDynamic(client *http.Client, importPath, tag, etag string) (pdoc *hv.Package, err error) { match, err := fetchMeta(client, importPath) if err != nil { return nil, err } if match["projectRoot"] != importPath { rootMatch, err := fetchMeta(client, match["projectRoot"]) if err != nil { return nil, err } if rootMatch["projectRoot"] != match["projectRoot"] { return nil, com.NotFoundError{"Project root mismatch."} } } pdoc, err = getStatic(client, com.Expand("{repo}{dir}", match), tag, etag) if err == errNoMatch { //pdoc, err = getVCSDoc(client, match, etag) } if err != nil { return nil, err } if pdoc != nil { pdoc.ImportPath = importPath pdoc.ProjectName = match["projectName"] } return pdoc, err }
// RenderIssueIndexPattern renders issue indexes to corresponding links.
//
// When metas is nil, indexes link to the local tracker at urlPrefix.
// Otherwise metas["format"] is expanded as an external tracker URL.
// Note that metas is mutated: the matched index is stored under
// metas["index"] for the template expansion.
func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	urlPrefix = cutoutVerbosePrefix(urlPrefix)
	// Choose the matching pattern by the configured issue-name style.
	// (Reading metas["style"] on a nil map safely yields "".)
	pattern := IssueNumericPattern
	if metas["style"] == ISSUE_NAME_STYLE_ALPHANUMERIC {
		pattern = IssueAlphanumericPattern
	}
	ms := pattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		if m[0] == ' ' || m[0] == '(' {
			m = m[1:] // ignore leading space or opening parentheses
		}
		var link string
		if metas == nil {
			// Local tracker: m[1:] strips the leading marker for the URL.
			link = fmt.Sprintf(`<a href="%s/issues/%s">%s</a>`, urlPrefix, m[1:], m)
		} else {
			// Support for external issue tracker
			if metas["style"] == ISSUE_NAME_STYLE_ALPHANUMERIC {
				metas["index"] = string(m)
			} else {
				metas["index"] = string(m[1:])
			}
			link = fmt.Sprintf(`<a href="%s">%s</a>`, com.Expand(metas["format"], metas), m)
		}
		// Replace only the first occurrence so each match is handled
		// by its own iteration.
		rawBytes = bytes.Replace(rawBytes, m, []byte(link), 1)
	}
	return rawBytes
}
// getDynamic resolves an import path via go-get meta tags, rewrites
// go.googlesource.com repositories to their GitHub mirror, and fetches
// documentation through getStatic with a VCS fallback.
func getDynamic(importPath, etag string) (pdoc *Package, err error) {
	match, err := fetchMeta(importPath)
	if err != nil {
		return nil, err
	}
	if match["projectRoot"] != importPath {
		// Sub-package: verify the project root's own meta tags agree.
		rootMatch, err := fetchMeta(match["projectRoot"])
		if err != nil {
			return nil, err
		}
		if rootMatch["projectRoot"] != match["projectRoot"] {
			return nil, errors.New("Project root mismatch")
		}
	}
	// Redirect go.googlesource.com repositories to the GitHub mirror
	// under github.com/golang.
	if strings.HasPrefix(match["repo"], "go.googlesource.com") {
		match["dir"] = "/" + path.Base(match["repo"])
		match["repo"] = "github.com/golang"
	}
	pdoc, err = getStatic(com.Expand("{repo}{dir}", match), etag)
	if err == ErrNoServiceMatch {
		// NOTE(review): on this fallback path pdoc.ImportPath is never
		// set, unlike the getStatic branch below — confirm intentional.
		pdoc, err = getVCSDoc(match, etag)
	} else if pdoc != nil {
		pdoc.ImportPath = importPath
	}
	if err != nil {
		return nil, err
	}
	return pdoc, err
}
// renderEditPage injects the "edit this page" link into the template
// data when the edit-page extension is enabled in settings.
func renderEditPage(ctx *middleware.Context, documentPath string) {
	if !setting.Extension.EnableEditPage {
		return
	}
	ctx.Data["EditPageLink"] = com.Expand(setting.Extension.EditPageLinkFormat,
		map[string]string{
			"lang": ctx.Locale.Language(),
			"blob": documentPath + ".md",
		})
}
func NewSearch(key string) (*SearchPackages, error) { url := com.Expand(searchApi, map[string]string{ "keyword": key, }) packages := new(SearchPackages) err := com.HttpGetJSON(httpClient, url, packages) return packages, err }
func ExampleExpand() { match := map[string]string{ "domain": "gowalker.org", "subdomain": "github.com", } s := "http://{domain}/{subdomain}/{0}/{1}" fmt.Println(com.Expand(s, match, "Unknwon", "gowalker")) // Output: http://gowalker.org/github.com/Unknwon/gowalker }
func getBitbucketArchive(client *http.Client, match map[string]string, n *Node) error { match["sha"] = n.Revision // Downlaod archive. if err := com.HttpGetToFile(client, com.Expand("https://bitbucket.org/{owner}/{repo}/get/{sha}.zip", match), nil, n.ArchivePath); err != nil { return fmt.Errorf("fail to download archive(%s): %v", n.ImportPath, err) } return nil }
// GetMission hands the next available build mission to a worker host.
// If no task matches the host's OS/arch, the worker is told to idle;
// otherwise the first matching task is packed into rep along with one
// BuildInfo per task variant.
func (r *Rpc) GetMission(args *HostInfo, rep *Mission) error {
	log.Debugf("arch: %v, host: %v", args.Arch, args.Host)
	tasks, err := models.GetAvaliableTasks(args.Os, args.Arch)
	if err == models.ErrTaskNotAvaliable {
		// Nothing to do: ask the worker to poll again shortly.
		rep.Idle = time.Second * 3
		return nil
	}
	if err != nil {
		log.Errorf("rpc: get mission error: %v", err)
		return err
	}
	task := tasks[0] // use first task
	rep.Mid = task.Id
	rep.Repo = task.Repo
	rep.PushURI = task.PushType + ":" + task.PushValue
	rep.CgoEnable = task.CgoEnable
	// Marshal error deliberately ignored: PkgInfo is advisory metadata.
	rep.PkgInfo, _ = json.MarshalIndent(PkgInfo{
		PushURI:     task.PushType + ":" + task.PushValue,
		Author:      []string{"unknown"},
		Description: "unknown",
	}, "", " ")
	for _, tk := range tasks {
		if tk.TagBranch == "" {
			tk.TagBranch = "temp-" + tk.PushType + ":" + tk.PushValue
		}
		filename := fmt.Sprintf("%s-%s-%s.%s", filepath.Base(rep.Repo.Uri), tk.Os, tk.Arch, "zip")
		if tk.Action == models.AC_SRCPKG {
			filename = fmt.Sprintf("%s-all-source.%s", filepath.Base(rep.Repo.Uri), "zip")
		}
		// NOTE(review): the template ends in the literal "(unknown)" and
		// the "filename" key below is never referenced by it — this
		// looks like it should be "{filename}"; confirm against the
		// upload-key consumers before changing.
		key := com.Expand("m{tid}/{reponame}/br-{branch}/(unknown)", map[string]string{
			"tid":      strconv.Itoa(int(rep.Mid)),
			"reponame": rep.Repo.Uri,
			"branch":   tk.TagBranch,
			"filename": filename,
		})
		bi := BuildInfo{
			Action:     tk.Action,
			Os:         tk.Os,
			Arch:       tk.Arch,
			UploadType: UT_QINIU,
			UploadData: base.Objc2Str(QiniuInfo{
				Bulket: defaultBulket,
				Key:    key,
				Token:  qntoken(key),
			}),
		}
		rep.Builds = append(rep.Builds, bi)
	}
	return nil
}
func getGoogleVCS(client *http.Client, match map[string]string) error { // Scrape the HTML project page to find the VCS. p, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/checkout", match), nil) if err != nil { return errors.New("doc.getGoogleVCS(" + match["importPath"] + ") -> " + err.Error()) } m := googleRepoRe.FindSubmatch(p) if m == nil { return com.NotFoundError{"Could not VCS on Google Code project page."} } match["vcs"] = string(m[1]) return nil }
func getGoogleVCS(client *http.Client, match map[string]string) error { // Scrape the HTML project page to find the VCS. p, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/checkout", match), nil) if err != nil { return fmt.Errorf("fail to fetch page: %v", err) } m := googleRepoRe.FindSubmatch(p) if m == nil { return com.NotFoundError{"Could not VCS on Google Code project page."} } match["vcs"] = string(m[1]) return nil }
func getGoogleVCS(match map[string]string) error { // Scrape the HTML project page to find the VCS. stdout, _, err := com.ExecCmd("curl", com.Expand("http://code.google.com/p/{repo}/source/checkout", match)) if err != nil { return errors.New("doc.getGoogleVCS(" + match["importPath"] + ") -> " + err.Error()) } m := googleRepoRe.FindSubmatch([]byte(stdout)) if m == nil { return com.NotFoundError{"Could not VCS on Google Code project page."} } match["vcs"] = string(m[1]) return nil }
// getGolangRevision determines the current revision of the Go
// repository on Google Code for the node's import path and records it
// (plus the archive path) on the node.
func getGolangRevision(client *http.Client, n *Node) error {
	match := map[string]string{}
	{
		// Build the match map from the named subexpressions of the
		// golang import-path pattern.
		m := golangPattern.FindStringSubmatch(n.ImportPath)
		for i, n := range golangPattern.SubexpNames() {
			if n != "" {
				match[n] = m[i]
			}
		}
		setupGoogleMatch(match)
	}
	match["repo"] = "go"
	if len(n.Value) == 0 {
		// No branch/tag requested: scrape the HTML project page to find
		// the VCS, then fall back to that VCS's default tag.
		p, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/checkout", match), nil)
		if err != nil {
			return fmt.Errorf("fail to fetch page: %v", err)
		}
		m := googleRepoRe.FindSubmatch(p)
		if m == nil {
			return fmt.Errorf("cannot find VCS on Google Code project page")
		}
		match["vcs"] = string(m[1])
		n.Value = defaultTags[match["vcs"]]
	}
	match["tag"] = n.Value
	// Scrape the source browser for the revision marker.
	data, err := com.HttpGetBytes(client, com.Expand("http://code.google.com/p/{repo}/source/browse/?repo={subrepo}&r={tag}", match), nil)
	if err != nil {
		return fmt.Errorf("fail to get revision(%s): %v", n.ImportPath, err)
	}
	m := googleRevisionPattern.FindSubmatch(data)
	if m == nil {
		return fmt.Errorf("cannot find revision in page: %s", n.ImportPath)
	}
	// The match is of the form _setViewedRevision('<rev>; strip the
	// JavaScript call prefix to leave the bare revision.
	n.Revision = strings.TrimPrefix(string(m[0]), `_setViewedRevision('`)
	n.ArchivePath = path.Join(setting.ArchivePath, n.ImportPath, n.Revision+".zip")
	return nil
}
func getGithubArchive(client *http.Client, match map[string]string, n *Node) error { match["sha"] = n.Revision // match["cred"] = setting.GithubCredentials // We use .zip here. // zip: https://github.com/{owner}/{repo}/archive/{sha}.zip // tarball: https://github.com/{owner}/{repo}/tarball/{sha} // Downlaod archive. if err := com.HttpGetToFile(client, com.Expand("https://github.com/{owner}/{repo}/archive/{sha}.zip", match), nil, n.ArchivePath); err != nil { return fmt.Errorf("fail to download archive(%s): %v", n.ImportPath, err) } return nil }
func getGoogleArchive(client *http.Client, match map[string]string, n *Node) error { setupGoogleMatch(match) match["tag"] = n.Revision if match["vcs"] == "svn" { return fmt.Errorf("SVN not support yet") } else { // Downlaod archive. if err := com.HttpGetToFile(client, com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/archive/{tag}.zip", match), nil, n.ArchivePath); err != nil { return fmt.Errorf("fail to download archive(%s): %v", n.ImportPath, err) } } return nil }
// https://gowalker.org/api/v1/pkginfo?pkgname=github.com/astaxie/beego func GetPkgInfo(pkgname string) (*PackageItem, error) { err := RefreshPkg(pkgname) if err != nil { return nil, err } url := com.Expand(pkginfoApi, map[string]string{ "pkgname": pkgname, }) pkginfo := new(PackageItem) if err = com.HttpGetJSON(httpClient, url, pkginfo); err != nil { return nil, err } if pkginfo.Id == 0 { return nil, ErrPkgNotExists } return pkginfo, err }
func getDynamic(client *http.Client, nod *Node, installRepoPath string, ctx *cli.Context) ([]string, error) { match, err := fetchMeta(client, nod.ImportPath) if err != nil { return nil, err } if match["projectRoot"] != nod.ImportPath { rootMatch, err := fetchMeta(client, match["projectRoot"]) if err != nil { return nil, err } if rootMatch["projectRoot"] != match["projectRoot"] { return nil, com.NotFoundError{"Project root mismatch."} } } nod.DownloadURL = com.Expand("{repo}{dir}", match) return PureDownload(nod, installRepoPath, ctx) }
// RenderIssueIndexPattern renders issue indexes to corresponding links.
//
// When metas is nil, indexes link to the local tracker at urlPrefix;
// otherwise metas["format"] is expanded as an external tracker URL
// (metas is mutated: the index is stored under metas["index"]).
func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	urlPrefix = cutoutVerbosePrefix(urlPrefix)
	ms := IssueIndexPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		var space string
		if m[0] != '#' {
			// Match starts with a separator character; keep it so the
			// replacement can re-emit it before the link.
			space = string(m[0])
			m = m[1:]
		}
		// NOTE(review): the Replace target is the trimmed m while the
		// replacement re-adds `space` — looks like it could duplicate
		// the separator in the output; confirm against rendered HTML.
		if metas == nil {
			rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s/issues/%s">%s</a>`,
				space, urlPrefix, m[1:], m)), 1)
		} else {
			// Support for external issue tracker
			metas["index"] = string(m[1:])
			rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s">%s</a>`,
				space, com.Expand(metas["format"], metas), m)), 1)
		}
	}
	return rawBytes
}
func (bs *BackupService) msgCreate(file string) { info, err := os.Stat(file) if err != nil { return } data := map[string]string{ "type": fmt.Sprint(model.MESSAGE_TYPE_BACKUP_CREATE), "file": filepath.Base(file), "time": utils.TimeUnixFormat(info.ModTime().Unix(), "01/02 15:04:05"), } body := com.Expand(MessageBackupCreateTemplate, data) message := &model.Message{ UserId: 0, From: model.MESSAGE_FROM_BACKUP, FromId: 0, Type: model.MESSAGE_TYPE_BACKUP_CREATE, Body: body, CreateTime: info.ModTime().Unix(), } Message.Save(message) }
func (ms *MediaService) msgUpload(m *model.Media) { user, err := getUserBy("id", m.UserId) if err != nil { return } data := map[string]string{ "type": fmt.Sprint(model.MESSAGE_TYPE_MEDIA_UPLOAD), "time": utils.TimeUnixFormat(m.CreateTime, "01/02 15:04:05"), "author": user.Name, "file": m.Name, } message := &model.Message{ UserId: m.UserId, From: model.MESSAGE_FROM_MEDIA, FromId: m.Id, Type: model.MESSAGE_TYPE_MEDIA_UPLOAD, CreateTime: m.CreateTime, Body: com.Expand(MessageMediaUploadTemplate, data), } Message.Save(message) }
// getGolangDoc builds documentation for a standard-library package by
// listing the golang/go GitHub tree, fetching the relevant Go source
// files, and walking them in memory. Returns ErrPackageNotModified
// when the repository head matches etag.
func getGolangDoc(importPath, etag string) (*Package, error) {
	match := map[string]string{
		"cred": setting.GitHubCredentials,
	}
	// Check revision.
	commit, err := getGithubRevision("github.com/golang/go")
	if err != nil {
		return nil, fmt.Errorf("get revision: %v", err)
	}
	if commit == etag {
		return nil, ErrPackageNotModified
	}
	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}
	if err := com.HttpGetJSON(Client, com.Expand("https://api.github.com/repos/golang/go/git/trees/master?recursive=1&{cred}", match), &tree); err != nil {
		return nil, fmt.Errorf("get tree: %v", err)
	}
	dirPrefix := "src/" + importPath + "/"
	dirLevel := len(strings.Split(dirPrefix, "/"))
	dirLength := len(dirPrefix)
	dirMap := make(map[string]bool)
	files := make([]com.RawFile, 0, 10)
	for _, node := range tree.Tree {
		// Skip directories and files in irrelevant directories.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}
		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); IsDocFile(f) {
			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				files = append(files, &Source{
					SrcName:   f,
					BrowseUrl: com.Expand("github.com/golang/go/blob/master/{0}", nil, node.Path),
					RawSrcUrl: com.Expand("https://raw.github.com/golang/go/master/{0}?{1}", nil, node.Path, setting.GitHubCredentials),
				})
				continue
			}
			// Otherwise, check if it's a direct sub-directory of import path.
			if len(strings.Split(d, "/"))-dirLevel == 1 {
				dirMap[d[dirLength:len(d)-1]] = true
				continue
			}
		}
	}
	dirs := base.MapToSortedStrings(dirMap)
	if len(files) == 0 && len(dirs) == 0 {
		return nil, ErrPackageNoGoFile
	} else if err := com.FetchFiles(Client, files, githubRawHeader); err != nil {
		return nil, fmt.Errorf("fetch files: %v", err)
	}
	// Start generating data.
	w := &Walker{
		LineFmt: "#L%d",
		Pdoc: &Package{
			PkgInfo: &models.PkgInfo{
				ImportPath:  importPath,
				ProjectPath: "github.com/golang/go",
				ViewDirPath: "github.com/golang/go/tree/master/src/" + importPath,
				Etag:        commit,
				IsGoRepo:    true,
				Subdirs:     strings.Join(dirs, "|"),
			},
		},
	}
	srcs := make([]*Source, 0, len(files))
	// NOTE(review): srcMap is populated below but never read in this
	// function — confirm whether it was meant to be passed to w.Build.
	srcMap := make(map[string]*Source)
	for _, f := range files {
		s := f.(*Source)
		srcs = append(srcs, s)
		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}
	pdoc, err := w.Build(&WalkRes{
		WalkDepth: WD_All,
		WalkType:  WT_Memory,
		WalkMode:  WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, fmt.Errorf("walk package: %v", err)
	}
	return pdoc, nil
}
// getGithubDoc downloads a repository zip archive from github.com,
// extracts it under installRepoPath, and optionally collects the
// import paths of the extracted packages (when nod.IsGetDeps).
func getGithubDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	match["cred"] = GetGithubCredentials()

	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER
			// Only get and check revision with the latest version.
			var refs []*struct {
				Ref    string
				Url    string
				Object struct {
					Sha  string
					Type string
					Url  string
				}
			}
			err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
			if err != nil {
				// A 403 (rate limit / auth) just skips the revision
				// check and proceeds with the download.
				if strings.Contains(err.Error(), "403") {
					break
				}
				log.Warn("GET", "Fail to get revision")
				log.Warn("", err.Error())
				break
			}
			var etag string
		COMMIT_LOOP:
			for _, ref := range refs {
				switch {
				case strings.HasPrefix(ref.Ref, "refs/heads/master"):
					etag = ref.Object.Sha
					break COMMIT_LOOP
				}
			}
			if etag == nod.Revision {
				log.Log("GET Package hasn't changed: %s", nod.ImportPath)
				return nil, nil
			}
			nod.Revision = etag
		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// We use .zip here.
	// zip: https://github.com/{owner}/{repo}/archive/{sha}.zip
	// tarball: https://github.com/{owner}/{repo}/tarball/{sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://github.com/{owner}/{repo}/archive/{sha}.zip", match), nil)
	if err != nil {
		return nil, errors.New("Fail to donwload Github repo -> " + err.Error())
	}
	// GitHub names the archive root "{repo}-{sha}"; tags drop the "v" prefix.
	shaName := com.Expand("{repo}-{sha}", match)
	if nod.Type == "tag" {
		shaName = strings.Replace(shaName, "-v", "-", 1)
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("github.com/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New(nod.ImportPath + " -> new zip: " + err.Error())
	}

	dirs := make([]string, 0, 5)
	// Need to add root path because we cannot get from tarball.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		absPath := strings.Replace(f.Name, shaName, installPath, 1)
		// Create directory before create file.
		os.MkdirAll(path.Dir(absPath)+"/", os.ModePerm)

	compareDir:
		switch {
		case strings.HasSuffix(absPath, "/"): // Directory.
			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				for _, d := range dirs {
					if d == absPath {
						break compareDir
					}
				}
				dirs = append(dirs, absPath)
			}
		default:
			// Get file from archive.
			// NOTE(review): the opened entry reader (shadowing r) is
			// never closed — confirm this is acceptable for zip readers.
			r, err := f.Open()
			if err != nil {
				return nil, err
			}

			fbytes := make([]byte, f.FileInfo().Size())
			_, err = io.ReadFull(r, fbytes)
			if err != nil {
				return nil, err
			}

			if err = com.WriteFile(absPath, fbytes); err != nil {
				return nil, err
			}

			// Set modify time.
			os.Chtimes(absPath, f.ModTime(), f.ModTime())
		}
	}

	var imports []string

	// Check if need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
// prepareRepoCommit clones the bare repository into tmpDir and stages
// the initial commit content: README.md (expanded from the chosen
// template), an optional combined .gitignore, and an optional LICENSE.
func prepareRepoCommit(repo *Repository, tmpDir, repoPath string, opts CreateRepoOptions) error {
	// Clone to temporary path and do the init commit.
	_, stderr, err := process.Exec(
		fmt.Sprintf("initRepository(git clone): %s", repoPath), "git", "clone", repoPath, tmpDir)
	if err != nil {
		return fmt.Errorf("git clone: %v - %s", err, stderr)
	}

	// README
	data, err := getRepoInitFile("readme", opts.Readme)
	if err != nil {
		return fmt.Errorf("getRepoInitFile[%s]: %v", opts.Readme, err)
	}

	cloneLink, err := repo.CloneLink()
	if err != nil {
		return fmt.Errorf("CloneLink: %v", err)
	}
	// Variables available to the README template.
	match := map[string]string{
		"Name":           repo.Name,
		"Description":    repo.Description,
		"CloneURL.SSH":   cloneLink.SSH,
		"CloneURL.HTTPS": cloneLink.HTTPS,
	}
	if err = ioutil.WriteFile(filepath.Join(tmpDir, "README.md"),
		[]byte(com.Expand(string(data), match)), 0644); err != nil {
		return fmt.Errorf("write README.md: %v", err)
	}

	// .gitignore
	// Multiple selections are concatenated, each prefixed with a
	// "# ---> name" banner.
	if len(opts.Gitignores) > 0 {
		var buf bytes.Buffer
		names := strings.Split(opts.Gitignores, ",")
		for _, name := range names {
			data, err = getRepoInitFile("gitignore", name)
			if err != nil {
				return fmt.Errorf("getRepoInitFile[%s]: %v", name, err)
			}
			buf.WriteString("# ---> " + name + "\n")
			buf.Write(data)
			buf.WriteString("\n")
		}

		if buf.Len() > 0 {
			if err = ioutil.WriteFile(filepath.Join(tmpDir, ".gitignore"), buf.Bytes(), 0644); err != nil {
				return fmt.Errorf("write .gitignore: %v", err)
			}
		}
	}

	// LICENSE
	if len(opts.License) > 0 {
		data, err = getRepoInitFile("license", opts.License)
		if err != nil {
			return fmt.Errorf("getRepoInitFile[%s]: %v", opts.License, err)
		}

		if err = ioutil.WriteFile(filepath.Join(tmpDir, "LICENSE"), data, 0644); err != nil {
			return fmt.Errorf("write LICENSE: %v", err)
		}
	}

	return nil
}
// getOSCDoc downloads a repository zip archive from git.oschina.net
// (OSChina), extracts it under installRepoPath, and optionally collects
// the import paths of the extracted packages (when nod.IsGetDeps).
func getOSCDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER
		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// zip: http://{projectRoot}/repository/archive?ref={sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("http://git.oschina.net/{owner}/{repo}/repository/archive?ref={sha}", match), nil)
	if err != nil {
		return nil, errors.New("Fail to donwload OSChina repo -> " + err.Error())
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("git.oschina.net/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New("Fail to unzip OSChina repo -> " + err.Error())
	}

	// Archive entries are prefixed with the repository name; strip it
	// when computing the on-disk path.
	nameLen := len(match["repo"])
	dirs := make([]string, 0, 5)
	// Need to add root path because we cannot get from tarball.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		fileName := f.Name[nameLen+1:]
		absPath := installPath + "/" + fileName

		if strings.HasSuffix(absPath, "/") {
			dirs = append(dirs, absPath)
			os.MkdirAll(absPath, os.ModePerm)
			continue
		}

		// Get file from archive.
		// NOTE(review): the opened entry reader (shadowing r) is never
		// closed — confirm this is acceptable for zip readers.
		r, err := f.Open()
		if err != nil {
			return nil, errors.New("Fail to open OSChina repo -> " + err.Error())
		}

		fbytes := make([]byte, f.FileInfo().Size())
		_, err = io.ReadFull(r, fbytes)
		if err != nil {
			return nil, err
		}

		if err = com.WriteFile(absPath, fbytes); err != nil {
			return nil, err
		}
	}

	var imports []string

	// Check if need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
// getBitbucketDoc downloads a repository tarball from bitbucket.org,
// extracts it under installRepoPath, and optionally collects the
// import paths of the extracted packages (when nod.IsGetDeps).
func getBitbucketDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check version control: either encoded in the etag value, or
	// queried from the Bitbucket API.
	if m := bitbucketEtagRe.FindStringSubmatch(nod.Value); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, err
		}
		match["vcs"] = repo.Scm
	}

	if nod.Type == BRANCH {
		if len(nod.Value) == 0 {
			match["commit"] = defaultTags[match["vcs"]]
		} else {
			match["commit"] = nod.Value
		}
	}

	if nod.IsGetDeps {
		if nod.Type == COMMIT {
			// Collect all branch and tag heads, then pick the best
			// matching tag for the VCS default.
			tags := make(map[string]string)
			for _, nodeType := range []string{"branches", "tags"} {
				var nodes map[string]struct {
					Node string
				}
				if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
					return nil, err
				}
				for t, n := range nodes {
					tags[t] = n.Node
				}
			}

			// Check revision tag.
			var err error
			match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
			if err != nil {
				return nil, err
			}

			nod.Value = match["commit"]
		}
	} else {
		// Check download type.
		switch nod.Type {
		case TAG, COMMIT, BRANCH:
			match["commit"] = nod.Value
		default:
			return nil, errors.New("Unknown node type: " + nod.Type)
		}
	}

	// We use .tar.gz here.
	// zip : https://bitbucket.org/{owner}/{repo}/get/{commit}.zip
	// tarball : https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz", match), nil)
	if err != nil {
		return nil, err
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("bitbucket.org/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // Auto path is the root path that generated by bitbucket.org.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// In case that we find directory, usually we should not.
		if strings.HasSuffix(fn, "/") {
			continue
		}

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, "/")]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		// Create directory before create file.
		dir := path.Dir(absPath)
		if !checkDir(dir, dirs) && !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
			dirs = append(dirs, dir)
			os.MkdirAll(dir+"/", os.ModePerm)
		}

		// Get data from archive.
		fbytes := make([]byte, h.Size)
		if _, err := io.ReadFull(tr, fbytes); err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}

		// Set modify time.
		os.Chtimes(absPath, h.AccessTime, h.ModTime)
	}

	var imports []string

	// Check if need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
// getLaunchpadDoc downloads a branch tarball from launchpad.net,
// extracts it under installRepoPath, and optionally collects the
// import paths of the extracted packages (when nod.IsGetDeps).
func getLaunchpadDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	if match["project"] != "" && match["series"] != "" {
		// Probe the bzr branch-format file to distinguish the two
		// possible import-path layouts.
		rc, err := com.HttpGet(client, com.Expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		_, isNotFound := err.(com.NotFoundError)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound:
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = com.Expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	var downloadPath string
	// Check if download with specific revision.
	if len(nod.Value) == 0 {
		downloadPath = com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match)
	} else {
		downloadPath = com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball/"+nod.Value, match)
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := com.HttpGetBytes(client, downloadPath, nil)
	if err != nil {
		return nil, err
	}

	installPath := installRepoPath + "/" + nod.ImportPath

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // Auto path is the root path that generated by bitbucket.org.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, match["repo"])+len(match["repo"])]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		switch {
		case h.FileInfo().IsDir(): // Directory.
			// Create directory before create file.
			os.MkdirAll(absPath+"/", os.ModePerm)

			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				dirs = append(dirs, absPath)
			}
		case !strings.HasPrefix(fn, "."):
			// Get data from archive.
			fbytes := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, fbytes); err != nil {
				return nil, err
			}

			if err = com.WriteFile(absPath, fbytes); err != nil {
				return nil, err
			}
		}
	}

	var imports []string

	// Check if need to check imports.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}