func getGoogleTags(importPath string, defaultBranch string, isGoRepo bool) []string { stdout, _, err := com.ExecCmd("curl", "http://"+utils.GetProjectPath(importPath)+"/source/browse") if err != nil { return nil } p := []byte(stdout) page := string(p) start := strings.Index(page, "<strong>Tag:</strong>") if start == -1 { return nil } m := googleTagRe.FindAllStringSubmatch(page[start:], -1) var tags []string if isGoRepo { tags = make([]string, 1, 20) } else { tags = make([]string, len(m)+1) } tags[0] = defaultBranch for i, v := range m { if isGoRepo { if strings.HasPrefix(v[1], "go") { tags = append(tags, v[1]) } continue } tags[i+1] = v[1] } return tags }
func getPkgInfoWithQ(path, tag string, q *qbs.Qbs) (*hv.PkgInfo, error) { // Check path length to reduce connect times. if len(path) == 0 { return nil, errors.New("models.getPkgInfoWithQ -> Empty path as not found.") } pinfo := new(hv.PkgInfo) q.WhereEqual("import_path", path).Find(pinfo) proPath := utils.GetProjectPath(path) if utils.IsGoRepoPath(path) { proPath = "code.google.com/p/go" } beego.Trace("models.getPkgInfoWithQ -> proPath:", proPath) ptag := new(PkgTag) cond := qbs.NewCondition("path = ?", proPath).And("tag = ?", tag) err := q.Condition(cond).Find(ptag) if err != nil { pinfo.Ptag = "ptag" return pinfo, errors.New( fmt.Sprintf("models.getPkgInfoWithQ( %s:%s ) -> 'PkgTag': %s", path, tag, err)) } pinfo.Vcs = ptag.Vcs pinfo.Tags = ptag.Tags // Only 'PkgInfo' cannot prove that package exists, // we have to check 'PkgDecl' as well in case it was deleted by mistake. pdecl := new(PkgDecl) cond = qbs.NewCondition("pid = ?", pinfo.Id).And("tag = ?", tag) err = q.Condition(cond).Find(pdecl) if err != nil { // Basically, error means not found, so we set 'pinfo.PkgVer' to 0 // because server uses it to decide whether force update. pinfo.PkgVer = 0 pinfo.Ptag = "ptag" return pinfo, errors.New( fmt.Sprintf("models.getPkgInfoWithQ( %s:%s ) -> 'PkgDecl': %s", path, tag, err)) } docPath := path + utils.TagSuffix("-", tag) if !com.IsExist("." + utils.DocsJsPath + docPath + ".js") { pinfo.PkgVer = 0 pinfo.Ptag = "ptag" return pinfo, errors.New( fmt.Sprintf("models.getPkgInfoWithQ( %s:%s ) -> JS: File not found", path, tag)) } return pinfo, nil }
// getVCSInfo returns VCS name, project name, project home page, and Upper level project URL. func getVCSInfo(q string, pdoc *doc.Package) (vcs, proName, proPath, pkgDocPath string) { // Get project name. lastIndex := strings.LastIndex(q, "/") proName = q[lastIndex+1:] if i := strings.Index(proName, "?"); i > -1 { proName = proName[:i] } // Project VCS home page. switch { case q[0] == 'c': // code.google.com vcs = "Google Code" if strings.Index(q, "source/") == -1 { proPath = strings.Replace(q, "/"+pdoc.ProjectName, "/"+pdoc.ProjectName+"/source/browse", 1) } else { proPath = q } case q[0] == 'g': // github.com vcs = "Github" if proName != pdoc.ProjectName { // Not root. proName := utils.GetProjectPath(pdoc.ImportPath) proPath = strings.Replace(q, proName, proName+"/tree/master", 1) } else { proPath = q + "/tree/master" } case q[0] == 'b': // bitbucket.org vcs = "BitBucket" if proName != pdoc.ProjectName { // Not root. proPath = strings.Replace(q, "/"+pdoc.ProjectName, "/"+pdoc.ProjectName+"/src/default", 1) } else { proPath = q + "/src/default" } case q[0] == 'l': // launchpad.net vcs = "Launchpad" proPath = "bazaar." + strings.Replace(q, "/"+pdoc.ProjectName, "/+branch/"+pdoc.ProjectName+"/view/head:/", 1) } pkgDocPath = q[:lastIndex] return vcs, proName, proPath, pkgDocPath }
func getCSDNTags(client *http.Client, importPath string) []string { p, err := httpGetBytes(client, "https://"+utils.GetProjectPath(importPath)+"/repository/tags", nil) if err != nil { return nil } tags := make([]string, 1, 6) tags[0] = "master" page := string(p) start := strings.Index(page, "<div class='tab-pane' id='tabs_tags'>") if start > -1 { m := oscTagRe.FindAllStringSubmatch(page[start:], -1) for i, v := range m { tags = append(tags, v[1]) if i == 4 { break } } } return tags }
func getGoogleTags(client *http.Client, importPath string) []string { p, err := httpGetBytes(client, "http://"+utils.GetProjectPath(importPath)+"/source/browse", nil) if err != nil { return nil } tags := make([]string, 1, 6) tags[0] = "master" page := string(p) start := strings.Index(page, "<strong>Tag:</strong>") if start > -1 { m := googleTagRe.FindAllStringSubmatch(page[start:], -1) for i, v := range m { tags = append(tags, v[1]) if i == 4 { break } } } return tags }
// getLaunchpadDoc downloads the source tarball of a Launchpad branch,
// extracts the doc-relevant files of the requested directory, computes an
// MD5-based etag over the file contents, and builds the Package via walker.
// It returns errNotModified when the computed etag equals savedEtag.
func getLaunchpadDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	if match["project"] != "" && match["series"] != "" {
		rc, err := httpGet(client, expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound(err):
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := httpGetBytes(client, expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
	if err != nil {
		return nil, err
	}

	// Get source file data: the tarball is gzip-compressed tar.
	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	// hash accumulates per-file MD5 sums; it is hashed again below to form the etag.
	var hash []byte
	dirPrefix := expand("+branch/{repo}{dir}/", match)
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 3)
	files := make([]*source, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		// Skip directories and files in wrong directories, get them later.
		if strings.HasSuffix(h.Name, "/") || !strings.HasPrefix(h.Name, dirPrefix) {
			continue
		}

		d, f := path.Split(h.Name)
		if utils.IsDocFile(f) && utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Get file from archive.
			b := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, b); err != nil {
				return nil, err
			}

			m := md5.New()
			m.Write(b)
			hash = m.Sum(hash)

			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &source{
					name:      f,
					browseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
					data:      b})
			} else {
				// Record the immediate subdirectory (deduplicated via checkDir).
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, NotFoundError{"Cannot find Go files, it's not a Go project."}
	}
	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Sort the per-file sums so the etag is independent of archive order,
	// then hash the concatenation into the final etag.
	sort.Sort(byHash(hash))
	m := md5.New()
	m.Write(hash)
	hash = m.Sum(hash[:0])
	etag := hex.EncodeToString(hash)
	if etag == savedEtag {
		return nil, errNotModified
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
// getVCSInfo returns VCS name, project name, project home page, and Upper level project URL. func getVCSInfo(q, tag string, pdoc *doc.Package) (vcs, proName, proPath, pkgDocPath string) { // Get project name. lastIndex := strings.LastIndex(q, "/") proName = q[lastIndex+1:] if i := strings.Index(proName, "?"); i > -1 { proName = proName[:i] } // Project VCS home page. switch { case strings.HasPrefix(q, "github.com"): // github.com vcs = "Github" if len(tag) == 0 { tag = "master" // Set tag. } if proName != pdoc.ProjectName { // Not root. proName := utils.GetProjectPath(pdoc.ImportPath) proPath = strings.Replace(q, proName, proName+"/tree/"+tag, 1) } else { proPath = q + "/tree/" + tag } case strings.HasPrefix(q, "code.google.com"): // code.google.com vcs = "Google Code" if strings.Index(q, "source/") == -1 { proPath = strings.Replace(q, "/"+pdoc.ProjectName, "/"+pdoc.ProjectName+"/source/browse", 1) } else { proPath = q q = strings.Replace(q, "source/browse/", "", 1) lastIndex = strings.LastIndex(q, "/") } proPath += "?r=" + tag // Set tag. case q[0] == 'b': // bitbucket.org vcs = "BitBucket" if len(tag) == 0 { tag = "default" // Set tag. } if proName != pdoc.ProjectName { // Not root. proPath = strings.Replace(q, "/"+pdoc.ProjectName, "/"+pdoc.ProjectName+"/src/"+tag, 1) } else { proPath = q + "/src/" + tag } case q[0] == 'l': // launchpad.net vcs = "Launchpad" proPath = "bazaar." + strings.Replace(q, "/"+pdoc.ProjectName, "/+branch/"+pdoc.ProjectName+"/view/head:/", 1) case strings.HasPrefix(q, "git.oschina.net"): // git.oschina.net vcs = "Git @ OSC" if len(tag) == 0 { tag = "master" // Set tag. } if proName != pdoc.ProjectName { // Not root. proName := utils.GetProjectPath(pdoc.ImportPath) proPath = strings.Replace(q, proName, proName+"/tree/"+tag, 1) } else { proPath = q + "/tree/" + tag } case strings.HasPrefix(q, "code.csdn.net"): // code.csdn.net vcs = "CSDN Code" if len(tag) == 0 { tag = "master" // Set tag. } if proName != pdoc.ProjectName { // Not root. 
proName := utils.GetProjectPath(pdoc.ImportPath) proPath = strings.Replace(q, proName, proName+"/tree/"+tag, 1) } else { proPath = q + "/tree/" + tag } } pkgDocPath = q[:lastIndex] return vcs, proName, proPath, pkgDocPath }
func getCSDNDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) { if len(tag) == 0 { match["tag"] = "master" } else { match["tag"] = tag } match["projectRoot"] = utils.GetProjectPath(match["importPath"]) // Download zip. p, err := httpGetBytes(client, expand("https://{projectRoot}/repository/archive?ref={tag}", match), nil) if err != nil { return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> " + err.Error()) } r, err := zip.NewReader(bytes.NewReader(p), int64(len(p))) if err != nil { return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> create zip: " + err.Error()) } commit := r.Comment // Get source file data and subdirectories. nameLen := len(match["importPath"][13:]) dirLen := nameLen + len(match["dir"]) dirs := make([]string, 0, 5) files := make([]*source, 0, 5) for _, f := range r.File { fileName := f.FileInfo().Name() if len(fileName) < dirLen { continue } // File. if utils.IsDocFile(fileName[dirLen+1:]) && strings.LastIndex(fileName, "/") == dirLen { // Get file from archive. rc, err := f.Open() if err != nil { return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> open file: " + err.Error()) } p := make([]byte, f.FileInfo().Size()) rc.Read(p) if err != nil { return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> read file: " + err.Error()) } files = append(files, &source{ name: fileName[dirLen+1:], browseURL: expand("http://code.csdn.net/{owner}/{repo}/blob/{tag}/{0}", match, fileName[nameLen+1:]), rawURL: expand("http://code.csdn.net/{owner}/{repo}/raw/{tag}/{0}", match, fileName[dirLen+1:]), data: p, }) continue } // Directory. if strings.HasSuffix(fileName, "/") && utils.FilterDirName(fileName[dirLen+1:]) { dirs = append(dirs, fileName[dirLen+1:]) } } if len(files) == 0 && len(dirs) == 0 { return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."} } // Get all tags. 
tags := getCSDNTags(client, match["importPath"]) // Start generating data. w := &walker{ lineFmt: "#L%d", pdoc: &Package{ ImportPath: match["importPath"], ProjectName: match["repo"], Tags: tags, Tag: tag, Etag: commit, Dirs: dirs, }, } return w.build(files) }
// getLaunchpadDoc downloads the source tarball of a Launchpad branch,
// extracts the doc-relevant files of the requested directory, computes an
// MD5-based etag over the file contents, and builds the documentation via
// hv.Walker. It returns errNotModified when the etag equals savedEtag.
func getLaunchpadDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {
	if match["project"] != "" && match["series"] != "" {
		rc, err := com.HttpGet(client, com.Expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound(err):
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = com.Expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := com.HttpGetBytes(client, com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
	if err != nil {
		return nil, err
	}

	// Get source file data: the tarball is gzip-compressed tar.
	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	// hash accumulates per-file MD5 sums; it is hashed again below to form the etag.
	var hash []byte
	dirPrefix := com.Expand("+branch/{repo}{dir}/", match)
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 3)
	files := make([]com.RawFile, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		// Skip directories and files in wrong directories, get them later.
		if strings.HasSuffix(h.Name, "/") || !strings.HasPrefix(h.Name, dirPrefix) {
			continue
		}

		d, f := path.Split(h.Name)
		if utils.IsDocFile(f) && utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Get file from archive.
			b := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, b); err != nil {
				return nil, err
			}

			m := md5.New()
			m.Write(b)
			hash = m.Sum(hash)

			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &hv.Source{
					SrcName:   f,
					BrowseUrl: com.Expand("bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
					SrcData:   b})
			} else {
				// Record the immediate subdirectory (deduplicated via checkDir).
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, com.NotFoundError{"Cannot find Go files, it's not a Go project."}
	}
	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Sort the per-file sums so the etag is independent of archive order,
	// then hash the concatenation into the final etag.
	sort.Sort(byHash(hash))
	m := md5.New()
	m.Write(hash)
	hash = m.Sum(hash[:0])
	etag := hex.EncodeToString(hash)
	if etag == savedEtag {
		return nil, errNotModified
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#L%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("bazaar.launchpad.net/+branch/{repo}/files", match),
				ViewDirPath: com.Expand("bazaar.launchpad.net/+branch/{repo}/files/head:{dir}/", match),
				Ptag:        etag,
				Vcs:         "Launchpad",
			},
			PkgDecl: &hv.PkgDecl{
				Dirs: dirs,
			},
		},
	}

	// Split the collected files into the walker source list and a map of
	// non-test sources (used for highlighted-view generation below).
	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)
		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getLaunchpadDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}
	return pdoc, err
}
// GetPkgInfo returns 'PkgInfo' by given import path and tag. // It returns error when the package does not exist. func GetPkgInfo(path, tag string) (*hv.PkgInfo, error) { // Check path length to reduce connect times. if len(path) == 0 { return nil, errors.New("models.GetPkgInfo -> Empty path as not found.") } pinfo := &hv.PkgInfo{ImportPath: path} has, err := x.Get(pinfo) if !has || err != nil { return pinfo, errors.New( fmt.Sprintf("models.GetPkgInfo( %s:%s ) -> Get hv.PkgInfo: %v", path, tag, err)) } proPath := utils.GetProjectPath(path) if utils.IsGoRepoPath(path) { proPath = "code.google.com/p/go" } beego.Trace("models.GetPkgInfo -> proPath:", proPath) ptag := &PkgTag{ Path: proPath, Tag: tag, } has, err = x.Get(ptag) if !has || err != nil { pinfo.Ptag = "ptag" return pinfo, errors.New( fmt.Sprintf("models.GetPkgInfo( %s:%s ) -> Get PkgTag: %v", path, tag, err)) } pinfo.Vcs = ptag.Vcs pinfo.Tags = ptag.Tags // Only 'PkgInfo' cannot prove that package exists, // we have to check 'PkgDecl' as well in case it was deleted by mistake. pdecl := &PkgDecl{ Pid: pinfo.Id, Tag: tag, } has, err = x.Get(pdecl) if err != nil { return pinfo, errors.New( fmt.Sprintf("models.GetPkgInfo( %s:%s ) -> Get PkgDecl: %v", path, tag, err)) } if !has { pinfo.PkgVer = 0 pinfo.Ptag = "ptag" return pinfo, errors.New( fmt.Sprintf("models.GetPkgInfo( %s:%s ) -> PkgDecl not exist: %v", path, tag, err)) } docPath := path + utils.TagSuffix("-", tag) if !com.IsExist("." + utils.DocsJsPath + docPath + ".js") { pinfo.PkgVer = 0 pinfo.Ptag = "ptag" return pinfo, errors.New( fmt.Sprintf("models.GetPkgInfo( %s:%s ) -> JS: File not found", path, tag)) } return pinfo, nil }
// SaveProject saves package information, declaration and functions;
// update import information.
// The write order matters: import/reference counters are reconciled first,
// then PkgInfo is upserted, then PkgDecl, PkgTag and PkgFunc rows.
func SaveProject(pinfo *hv.PkgInfo, pdecl *PkgDecl, pfuncs []PkgFunc, imports []string) error {
	// Load package information(save after checked import information).
	info := &hv.PkgInfo{ImportPath: pinfo.ImportPath}
	has, err := x.Get(info)
	if err != nil {
		return errors.New(
			fmt.Sprintf("models.SaveProject( %s ) -> Get hv.PkgInfo: %s", pinfo.ImportPath, err))
	}
	if has {
		pinfo.Id = info.Id
	}

	// ------------------------------
	// Update imported information.
	// ------------------------------

	// Only the untagged (master) declaration updates import information.
	isMaster := pdecl != nil && len(pdecl.Tag) == 0
	if info.Id > 0 {
		// Current package: keep only referencing pids that still import us.
		importeds := strings.Split(info.RefPids, "|")
		importPids := make([]string, 0, len(importeds))
		for _, v := range importeds {
			pid, _ := strconv.ParseInt(v, 10, 64)
			if checkImport(info.ImportPath, pid) {
				importPids = append(importPids, v)
			}
		}
		pinfo.RefPids = strings.Join(importPids, "|")
		pinfo.RefNum = len(importPids)
	}

	if isMaster {
		// Merge pending references queued in PkgImport, then delete the queue row.
		pimp := &PkgImport{Path: pinfo.ImportPath}
		has, err := x.Get(pimp)
		if err != nil {
			return errors.New(
				fmt.Sprintf("models.SaveProject( %s ) -> Get PkgImport: %s", pinfo.ImportPath, err))
		}
		if has {
			importPids := strings.Split(pinfo.RefPids, "|")
			pimps := strings.Split(pimp.Imports, "|")
			for _, v := range pimps {
				if len(v) == 0 {
					continue
				}
				pid, _ := strconv.ParseInt(v, 10, 64)
				if i := getRefIndex(importPids, v); i == -1 && checkImport(info.ImportPath, pid) {
					importPids = append(importPids, v)
				}
			}
			_, err := x.Id(pimp.Id).Delete(pimp)
			if err != nil {
				beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Delete PkgImport:", err.Error())
			}
			pinfo.RefPids = strings.Join(importPids, "|")
			pinfo.RefNum = len(importPids)
			// Drop a spurious leading separator left by joining an empty first element.
			if pinfo.RefNum > 0 && strings.HasPrefix(pinfo.RefPids, "|") {
				pinfo.RefPids = pinfo.RefPids[1:]
				pinfo.RefNum--
			}
		}
	} else {
		pinfo.Ptag = info.Ptag
	}

	if has {
		_, err = x.Id(pinfo.Id).UseBool("is_cgo").Update(pinfo)
	} else {
		_, err = x.Insert(pinfo)
	}
	if err != nil {
		// NOTE(review): save errors are logged but not returned — presumably
		// intentional best-effort behavior; verify with callers.
		beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Information2:", err)
	}

	// Don't need to check standard library and non-master projects.
	if imports != nil && isMaster && !utils.IsGoRepoPath(pinfo.ImportPath) {
		// Other packages.
		for _, v := range imports {
			if !utils.IsGoRepoPath(v) && v != "C" {
				// Only count non-standard library.
				updateImportInfo(v, int(pinfo.Id), int(pinfo.Rank), true)
			}
		}
	}
	// ------------- END ------------

	// Save package declaration.
	decl := new(PkgDecl)
	if pdecl != nil {
		has, err := x.Where("pid = ?", pinfo.Id).And("tag = ?", pdecl.Tag).Get(decl)
		if err != nil {
			beego.Error("models.SaveProject(", pinfo.Id, pdecl.Tag, ") -> Get PkgDecl:", err.Error())
		}
		if has {
			pdecl.Id = decl.Id
		}
		pdecl.Pid = pinfo.Id
		if has {
			_, err = x.Id(pdecl.Id).Update(pdecl)
		} else {
			_, err = x.Insert(pdecl)
		}
		if err != nil {
			beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Declaration:", err)
		}

		// ------------------------------
		// Save package tag.
		// ------------------------------

		proPath := utils.GetProjectPath(pinfo.ImportPath)
		if utils.IsGoRepoPath(pinfo.ImportPath) {
			proPath = "code.google.com/p/go"
		}

		pkgTag := &PkgTag{
			Path: proPath,
			Tag:  pdecl.Tag,
		}
		has, err = x.Get(pkgTag)
		if err != nil {
			beego.Error("models.SaveProject(", proPath, pdecl.Tag, ") -> Get PkgTag:", err)
		}
		if !has {
			pkgTag.Path = proPath
			pkgTag.Tag = pdecl.Tag
		}
		pkgTag.Vcs = pinfo.Vcs
		pkgTag.Tags = pinfo.Tags
		if has {
			_, err = x.Id(pkgTag.Id).Update(pkgTag)
		} else {
			_, err = x.Insert(pkgTag)
		}
		if err != nil {
			beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Save PkgTag:", err)
		}
		// ------------- END ------------
	}

	// ------------------------------
	// Save package functions.
	// ------------------------------

	if pfuncs != nil {
		// Old package need to clean old data.
		if decl.Id > 0 {
			// Update all old functions' 'IsOld' to be true.
			type pkgFunc struct {
				IsOld bool
			}
			pfunc := &pkgFunc{IsOld: true}
			_, err = x.Where("pid = ?", pdecl.Id).UseBool().Update(pfunc)
			if err != nil {
				beego.Error("models.SaveProject(", pdecl.Id, ") -> Mark function old:", err)
			}
		}

		// Save new ones (re-saving clears the is_old flag set above).
		for _, pf := range pfuncs {
			f := &PkgFunc{
				Pid:  pdecl.Id,
				Name: pf.Name,
			}
			has, err := x.Get(f)
			if err != nil {
				beego.Error("models.SaveProject(", pdecl.Id, ") -> Get PkgFunc:", err)
				continue
			}
			if has {
				pf.Id = f.Id
			}
			pf.Pid = pdecl.Id
			if has {
				_, err = x.Id(pf.Id).UseBool().Update(pf)
			} else {
				_, err = x.Insert(pf)
			}
			if err != nil {
				beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Update function(", pf.Name, "):", err)
			}
		}

		if decl.Id > 0 {
			// Delete old ones if exist.
			_, err := x.Where("pid = ?", pdecl.Id).And("is_old = ?", true).Delete(new(PkgFunc))
			if err != nil {
				beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Delete functions:", err)
			}
		}
	}
	// ------------- END ------------

	return nil
}
// getGithubDoc fetches package data from the GitHub API: it reads the refs
// to determine the master commit and tag list, walks the recursive git tree
// of the requested tag to find doc-relevant files, fetches their contents,
// and builds the Package via walker. It returns errNotModified when the
// master commit equals savedEtag and no explicit tag was requested.
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	match["cred"] = githubCred

	// Get master commit.
	var refs []*struct {
		Ref    string
		Url    string
		Object struct {
			Sha  string
			Type string
			Url  string
		}
	}

	err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		if strings.HasPrefix(err.Error(), "Resource not found") {
			return nil, NotFoundError{"doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error()}
		}
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	var commit string
	// Get all tags.
	tags := make([]string, 0, 5)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/master"):
			commit = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags = append(tags, ref.Ref[len("refs/tags/"):])
		}
	}
	// Keep at most the five most recent tags, with "master" always first.
	if len(tags) > 5 {
		tags = tags[len(tags)-5:]
	}
	tags = append([]string{"master"}, tags...)

	if len(tag) == 0 {
		// Check revision tag.
		if commit == savedEtag {
			return nil, errNotModified
		}
		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, errors.New("Github import path has incorrect case.")
	}

	// Get source file data and subdirectories.
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)
	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, node := range tree.Tree {
		// Skip directories and files in wrong directories, get them later.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); utils.IsDocFile(f) && utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &source{
					name:      f,
					browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
					rawURL:    node.Url + "?" + githubCred,
				})
			} else {
				// Record the immediate subdirectory (deduplicated via checkDir).
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, NotFoundError{"Cannot find Go files, it's not a Go project."}
	}
	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	// Get additional information: forks, watchers.
	// var note struct {
	// 	Forks    int
	// 	Watchers int `json:"watchers_count"`
	// }
	// err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}?{cred}", match), &note)
	// if err != nil {
	// 	return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get note: " + err.Error())
	// }

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
			//Note: strconv.Itoa(note.Forks) + "|" +
			//	strconv.Itoa(note.Watchers) + "|",
		},
	}
	return w.build(files)
}
func getOSCDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) { if len(tag) == 0 { match["tag"] = "master" } else { match["tag"] = tag } // Force to lower case. match["importPath"] = strings.ToLower(match["importPath"]) match["projectRoot"] = utils.GetProjectPath(match["importPath"]) // Download zip. p, err := httpGetBytes(client, expand("http://{projectRoot}/repository/archive?ref={tag}", match), nil) if err != nil { return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> " + err.Error()) } r, err := zip.NewReader(bytes.NewReader(p), int64(len(p))) if err != nil { return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> create zip: " + err.Error()) } commit := r.Comment // Get source file data and subdirectories. nameLen := len(match["repo"]) dirPrefix := match["dir"] if dirPrefix != "" { dirPrefix = dirPrefix[1:] + "/" } preLen := len(dirPrefix) isGoPro := false // Indicates whether it's a Go project. isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"]) dirs := make([]string, 0, 5) files := make([]*source, 0, 5) for _, f := range r.File { fileName := f.FileInfo().Name()[nameLen+1:] // Skip directories and files in wrong directories, get them later. if strings.HasSuffix(fileName, "/") || !strings.HasPrefix(fileName, dirPrefix) { continue } // Get files and check if directories have acceptable files. if d, fn := path.Split(fileName); utils.IsDocFile(fn) && utils.FilterDirName(d) { // Check if it's a Go file. if isRootPath && !isGoPro && strings.HasSuffix(fn, ".go") { isGoPro = true } // Check if file is in the directory that is corresponding to import path. if d == dirPrefix { // Yes. if !isRootPath && !isGoPro && strings.HasSuffix(fn, ".go") { isGoPro = true } // Get file from archive. 
rc, err := f.Open() if err != nil { return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> open file: " + err.Error()) } p := make([]byte, f.FileInfo().Size()) rc.Read(p) if err != nil { return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> read file: " + err.Error()) } files = append(files, &source{ name: fn, browseURL: expand("http://git.oschina.net/{owner}/{repo}/blob/{tag}/{0}", match, fileName), rawURL: expand("http://git.oschina.net/{owner}/{repo}/raw/{tag}/{0}", match, fileName[preLen:]), data: p, }) } else { sd, _ := path.Split(d[preLen:]) sd = strings.TrimSuffix(sd, "/") if !checkDir(sd, dirs) { dirs = append(dirs, sd) } } } } if !isGoPro { return nil, NotFoundError{"Cannot find Go files, it's not a Go project."} } if len(files) == 0 && len(dirs) == 0 { return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."} } // Get all tags. tags := getOSCTags(client, match["importPath"]) // Start generating data. w := &walker{ lineFmt: "#L%d", pdoc: &Package{ ImportPath: match["importPath"], ProjectName: match["repo"], Tags: tags, Tag: tag, Etag: commit, Dirs: dirs, }, } return w.build(files) }
// SaveProject saves package information, declaration and functions;
// update import information.
// The write order matters: import/reference counters are reconciled first,
// then PkgInfo is saved, then PkgDecl, PkgTag and PkgFunc rows.
func SaveProject(pinfo *hv.PkgInfo, pdecl *PkgDecl, pfuncs []*PkgFunc, imports []string) error {
	q := connDb()
	defer q.Close()

	// Load package information(save after checked import information).
	info := new(hv.PkgInfo)
	err := q.WhereEqual("import_path", pinfo.ImportPath).Find(info)
	if err == nil {
		pinfo.Id = info.Id
	}

	// ------------------------------
	// Update imported information.
	// ------------------------------

	// Only the untagged (master) declaration updates import information.
	isMaster := pdecl != nil && len(pdecl.Tag) == 0
	if info.Id > 0 {
		// Current package: keep only referencing pids that still import us.
		importeds := strings.Split(info.RefPids, "|")
		importPids := make([]string, 0, len(importeds))
		for _, v := range importeds {
			pid, _ := strconv.ParseInt(v, 10, 64)
			if checkImport(q, info.ImportPath, pid) {
				importPids = append(importPids, v)
			}
		}
		pinfo.RefPids = strings.Join(importPids, "|")
		pinfo.RefNum = len(importPids)
	}

	if isMaster {
		// Merge pending references queued in PkgImport, then delete the queue row.
		pimp := new(PkgImport)
		err := q.WhereEqual("path", pinfo.ImportPath).Find(pimp)
		if err == nil {
			importPids := strings.Split(pinfo.RefPids, "|")
			pimps := strings.Split(pimp.Imports, "|")
			for _, v := range pimps {
				if len(v) == 0 {
					continue
				}
				pid, _ := strconv.ParseInt(v, 10, 64)
				if i := getRefIndex(importPids, v); i == -1 && checkImport(q, info.ImportPath, pid) {
					importPids = append(importPids, v)
				}
			}
			q.WhereEqual("id", pimp.Id).Delete(pimp)
			pinfo.RefPids = strings.Join(importPids, "|")
			pinfo.RefNum = len(importPids)
			// Drop a spurious leading separator left by joining an empty first element.
			if pinfo.RefNum > 0 && strings.HasPrefix(pinfo.RefPids, "|") {
				pinfo.RefPids = pinfo.RefPids[1:]
				pinfo.RefNum--
			}
		}
	} else {
		pinfo.Ptag = info.Ptag
	}

	// NOTE(review): save errors are logged but not returned — presumably
	// intentional best-effort behavior; verify with callers.
	_, err = q.Save(pinfo)
	if err != nil {
		beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Information2:", err)
	}

	// Don't need to check standard library and non-master projects.
	if imports != nil && isMaster && !utils.IsGoRepoPath(pinfo.ImportPath) {
		// Other packages.
		for _, v := range imports {
			if !utils.IsGoRepoPath(v) {
				// Only count non-standard library.
				updateImportInfo(q, v, int(pinfo.Id), int(pinfo.Rank), true)
			}
		}
	}
	// ------------- END ------------

	// Save package declaration.
	decl := new(PkgDecl)
	if pdecl != nil {
		cond := qbs.NewCondition("pid = ?", pinfo.Id).And("tag = ?", pdecl.Tag)
		err = q.Condition(cond).Find(decl)
		if err == nil {
			pdecl.Id = decl.Id
		}
		pdecl.Pid = pinfo.Id
		_, err = q.Save(pdecl)
		if err != nil {
			beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Declaration:", err)
		}

		// ------------------------------
		// Save package tag.
		// ------------------------------

		proPath := utils.GetProjectPath(pinfo.ImportPath)
		if utils.IsGoRepoPath(pinfo.ImportPath) {
			proPath = "code.google.com/p/go"
		}

		pkgTag := new(PkgTag)
		cond = qbs.NewCondition("path = ?", proPath).And("tag = ?", pdecl.Tag)
		err = q.Condition(cond).Find(pkgTag)
		if err != nil {
			// Not found: initialize the key fields for a fresh row.
			pkgTag.Path = proPath
			pkgTag.Tag = pdecl.Tag
		}
		pkgTag.Vcs = pinfo.Vcs
		pkgTag.Tags = pinfo.Tags
		_, err = q.Save(pkgTag)
		if err != nil {
			beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> PkgTag:", err)
		}
		// ------------- END ------------
	}

	// ------------------------------
	// Save package functions.
	// ------------------------------

	if pfuncs != nil {
		// Old package need to clean old data.
		if decl.Id > 0 {
			// Update all old functions' 'IsOld' to be true.
			type pkgFunc struct {
				IsOld bool
			}
			pfunc := new(pkgFunc)
			pfunc.IsOld = true
			_, err = q.WhereEqual("pid", pdecl.Id).Update(pfunc)
		}

		// Save new ones (re-saving clears the is_old flag set above).
		for _, pf := range pfuncs {
			f := new(PkgFunc)
			cond := qbs.NewCondition("pid = ?", pdecl.Id).And("name = ?", pf.Name)
			err = q.Condition(cond).Find(f)
			if err == nil {
				pf.Id = f.Id
			}
			pf.Pid = pdecl.Id
			_, err = q.Save(pf)
			if err != nil {
				beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Update function(", pf.Name, "):", err)
			}
		}

		if decl.Id > 0 {
			// Delete old ones if exist.
			cond := qbs.NewCondition("pid = ?", pdecl.Id).And("is_old = ?", true)
			_, err = q.Condition(cond).Delete(new(PkgFunc))
			if err != nil {
				beego.Error("models.SaveProject(", pinfo.ImportPath, ") -> Delete functions:", err)
			}
		}
	}
	// ------------- END ------------

	return nil
}