func getBitbucketDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> " + err.Error())
		}
		match["vcs"] = repo.Scm
	}

	// Get master commit.
	var branches map[string]struct {
		Node string
	}
	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/branches", match), &branches); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get branches: " + err.Error())
	}
	match["commit"] = branches["default"].Node

	// Get all tags.
	tags := make([]string, 0, 5)
	var nodes map[string]struct {
		Node string
	}
	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/tags", match), &nodes); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get nodes: " + err.Error())
	}
	for k := range nodes {
		tags = append(tags, k)
	}
	if len(tags) > 5 {
		tags = tags[len(tags)-5:]
	}
	tags = append([]string{defaultTags[match["vcs"]]}, tags...)

	var etag string
	if len(tag) == 0 {
		// Check revision tag.
		etag = expand("{vcs}-{commit}", match)
		if etag == savedEtag {
			return nil, errNotModified
		}
		match["tag"] = defaultTags[match["vcs"]]
	} else {
		match["tag"] = tag
	}

	// Get files and directories.
	var node struct {
		Files []struct {
			Path string
		}
		Directories []string
	}
	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &node); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Get source file data.
	files := make([]*source, 0, 5)
	for _, f := range node.Files {
		_, name := path.Split(f.Path)
		if utils.IsDocFile(name) {
			files = append(files, &source{
				name:      name,
				browseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				rawURL:    expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, len(node.Directories))
	for _, d := range node.Directories {
		if utils.FilterFileName(d) {
			dirs = append(dirs, d)
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#cl-%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
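// exampleGetBitbucketDoc is a minimal usage sketch, not part of the original
// file. The match keys and values shown here are assumptions about what the
// import-path router fills in before calling getBitbucketDoc.
func exampleGetBitbucketDoc(client *http.Client) (*Package, error) {
	match := map[string]string{
		"importPath": "bitbucket.org/owner/repo", // hypothetical import path
		"owner":      "owner",
		"repo":       "repo",
		"dir":        "", // repository root
	}
	// An empty tag selects the default branch; an empty savedEtag skips the
	// "{vcs}-{commit}" not-modified short circuit and forces a full fetch.
	return getBitbucketDoc(client, match, "", "")
}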
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	match["cred"] = githubCred

	// Get master commit.
	var refs []*struct {
		Ref    string
		Url    string
		Object struct {
			Sha  string
			Type string
			Url  string
		}
	}
	err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	var commit string
	// Get all tags.
	tags := make([]string, 0, 5)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/master"):
			commit = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags = append(tags, ref.Ref[len("refs/tags/"):])
		}
	}
	if len(tags) > 5 {
		tags = tags[len(tags)-5:]
	}
	tags = append([]string{"master"}, tags...)

	if len(tag) == 0 {
		// Check revision tag.
		if commit == savedEtag {
			return nil, errNotModified
		}
		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Get files and subdirectories.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}
	err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, NotFoundError{"Github import path has incorrect case."}
	}

	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	// Get source file data and subdirectories.
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, node := range tree.Tree {
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			if len(dirPrefix) > 0 && strings.HasPrefix(node.Path, dirPrefix) {
				p := node.Path[preLen:]
				dirs = append(dirs, p)
			} else if len(dirPrefix) == 0 && strings.Index(node.Path, "/") == -1 && utils.FilterFileName(node.Path) {
				dirs = append(dirs, node.Path)
			}
			continue
		}

		if d, f := path.Split(node.Path); d == dirPrefix && utils.IsDocFile(f) {
			files = append(files, &source{
				name:      f,
				browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
				rawURL:    node.Url + "?" + githubCred,
			})
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	/*browseURL := expand("https://github.com/{owner}/{repo}", match)
	if match["dir"] != "" {
		browseURL = expand("https://github.com/{owner}/{repo}/tree/{tag}{dir}", match)
	}*/

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
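// exampleGithubNotModified sketches how a caller might reuse getGithubDoc's
// etag (the master commit SHA) to skip refetching. This helper is illustrative
// only, not part of the original file, and the match values are hypothetical.
func exampleGithubNotModified(client *http.Client, savedEtag string) (*Package, error) {
	match := map[string]string{
		"importPath": "github.com/owner/repo", // hypothetical import path
		"owner":      "owner",
		"repo":       "repo",
		"dir":        "",
	}
	pdoc, err := getGithubDoc(client, match, "", savedEtag)
	if err == errNotModified {
		// The master commit SHA still equals savedEtag, so the cached copy of
		// the package documentation can be served as-is.
		return nil, err
	}
	return pdoc, err
}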
func getStandardDoc(client *http.Client, importPath, tag, savedEtag string) (pdoc *Package, err error) {
	// hg-history: http://go.googlecode.com/hg-history/release/src/pkg/"+importPath+"/"
	p, err := httpGetBytes(client, "http://go.googlecode.com/hg/src/pkg/"+importPath+"/?r="+tag, nil)
	if err != nil {
		return nil, errors.New("doc.getStandardDoc(" + importPath + ") -> " + err.Error())
	}

	// Check revision tag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("doc.getStandardDoc(" + importPath + ") -> Could not find revision")
	} else {
		etag = string(m[1])
		if etag == savedEtag {
			return nil, errNotModified
		}
	}

	// Get source file data.
	files := make([]*source, 0, 5)
	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
		fname := strings.Split(string(m[1]), "?")[0]
		if utils.IsDocFile(fname) {
			files = append(files, &source{
				name:      fname,
				browseURL: "http://code.google.com/p/go/source/browse/src/pkg/" + importPath + "/" + fname + "?r=" + tag,
				rawURL:    "http://go.googlecode.com/hg/src/pkg/" + importPath + "/" + fname + "?r=" + tag,
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, 5)
	for _, m := range googleDirRe.FindAllSubmatch(p, -1) {
		dirName := strings.Split(string(m[1]), "?")[0]
		// Make sure we get directories.
		if strings.HasSuffix(dirName, "/") && utils.FilterFileName(dirName) {
			dirs = append(dirs, strings.Replace(dirName, "/", "", -1))
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Get all tags.
	tags := getGoogleTags(client, "code.google.com/p/go/"+importPath)

	// Start generating data.
	w := &walker{
		lineFmt: "#%d",
		pdoc: &Package{
			ImportPath:  importPath,
			ProjectName: "Go",
			Tags:        tags,
			Tag:         tag,
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
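// exampleGetStandardDoc is a sketch of fetching standard-library documentation
// at a specific repository tag. It is not part of the original file, and the
// "release" tag used here is only an assumption.
func exampleGetStandardDoc(client *http.Client) (*Package, error) {
	// importPath is relative to src/pkg in the Go repository, e.g. "net/http";
	// an empty savedEtag forces a full fetch instead of errNotModified.
	return getStandardDoc(client, "net/http", "release", "")
}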
func getCSDNDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	if len(tag) == 0 {
		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}
	match["projectRoot"] = utils.GetProjectPath(match["importPath"])

	// Download zip.
	p, err := httpGetBytes(client, expand("https://{projectRoot}/repository/archive?ref={tag}", match), nil)
	if err != nil {
		return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> create zip: " + err.Error())
	}
	commit := r.Comment

	// Get source file data and subdirectories.
	nameLen := len(match["importPath"][13:]) // strip the 13-byte "code.csdn.net" host prefix.
	dirLen := nameLen + len(match["dir"])
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, f := range r.File {
		fileName := f.FileInfo().Name()
		if len(fileName) < dirLen {
			continue
		}

		// File.
		if utils.IsDocFile(fileName[dirLen+1:]) && strings.LastIndex(fileName, "/") == dirLen {
			// Get file from archive.
			rc, err := f.Open()
			if err != nil {
				return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> open file: " + err.Error())
			}

			p := make([]byte, f.FileInfo().Size())
			// Read the whole entry; a single rc.Read call may return fewer bytes.
			_, err = io.ReadFull(rc, p)
			rc.Close()
			if err != nil {
				return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> read file: " + err.Error())
			}

			files = append(files, &source{
				name:      fileName[dirLen+1:],
				browseURL: expand("http://code.csdn.net/{owner}/{repo}/blob/{tag}/{0}", match, fileName[nameLen+1:]),
				rawURL:    expand("http://code.csdn.net/{owner}/{repo}/raw/{tag}/{0}", match, fileName[dirLen+1:]),
				data:      p,
			})
			continue
		}

		// Directory.
		if strings.HasSuffix(fileName, "/") && utils.FilterFileName(fileName[dirLen+1:]) {
			dirs = append(dirs, fileName[dirLen+1:])
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Get all tags.
	tags := getCSDNTags(client, match["importPath"])

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
func getGoogleDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	setupGoogleMatch(match)
	if m := googleEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else if err := getGoogleVCS(client, match); err != nil {
		return nil, err
	}

	match["tag"] = tag
	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := httpGetBytes(client, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/?r={tag}", match), nil)
	if err != nil {
		return nil, errors.New("doc.getGoogleDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	// Check revision tag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("doc.getGoogleDoc(" + match["importPath"] + ") -> Could not find revision")
	} else {
		etag = expand("{vcs}-{0}", match, string(m[1]))
		if etag == savedEtag {
			return nil, errNotModified
		}
	}

	// Get source file data.
	files := make([]*source, 0, 5)
	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
		fname := string(m[1])
		if utils.IsDocFile(fname) {
			files = append(files, &source{
				name:      fname,
				browseURL: expand("http://code.google.com/p/{repo}/source/browse{dir}/{0}{query}?r={tag}", match, fname),
				rawURL:    expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{0}?r={tag}", match, fname),
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, 5)
	for _, m := range googleDirRe.FindAllSubmatch(p, -1) {
		dirName := strings.Split(string(m[1]), "?")[0]
		// Make sure we get directories.
		if strings.HasSuffix(dirName, "/") && utils.FilterFileName(dirName) {
			dirs = append(dirs, strings.Replace(dirName, "/", "", -1))
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Get all tags.
	tags := getGoogleTags(client, match["importPath"])

	// Start generating data.
	w := &walker{
		lineFmt: "#%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: expand("{repo}{dot}{subrepo}", match),
			Tags:        tags,
			Tag:         tag,
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}