func fetchGoogleFiles(client *http.Client, files []com.RawFile) error { count := len(files) step := 5 start := 0 end := step isExit := false for { if end > count { end = count isExit = true } if err := com.FetchFiles(client, files[start:end], nil); err != nil { return err } if isExit { return nil } start += step end += step } }
func ExampleFetchFiles() { // Code that should be outside of your function body. // type rawFile struct { // name string // rawURL string // data []byte // } // func (rf *rawFile) Name() string { // return rf.name // } // func (rf *rawFile) RawUrl() string { // return rf.rawURL // } // func (rf *rawFile) Data() []byte { // return rf.data // } // func (rf *rawFile) SetData(p []byte) { // rf.data = p // } files := []com.RawFile{ &rawFile{rawURL: "http://example.com"}, &rawFile{rawURL: "http://example.com/foo"}, } err := com.FetchFiles(&http.Client{}, files, nil) fmt.Println(err, len(files[0].Data()), len(files[1].Data())) }
func getGolangDoc(importPath, etag string) (*Package, error) { match := map[string]string{ "cred": setting.GitHubCredentials, } // Check revision. commit, err := getGithubRevision("github.com/golang/go") if err != nil { return nil, fmt.Errorf("get revision: %v", err) } if commit == etag { return nil, ErrPackageNotModified } // Get files. var tree struct { Tree []struct { Url string Path string Type string } Url string } if err := com.HttpGetJSON(Client, com.Expand("https://api.github.com/repos/golang/go/git/trees/master?recursive=1&{cred}", match), &tree); err != nil { return nil, fmt.Errorf("get tree: %v", err) } dirPrefix := "src/" + importPath + "/" dirLevel := len(strings.Split(dirPrefix, "/")) dirLength := len(dirPrefix) dirMap := make(map[string]bool) files := make([]com.RawFile, 0, 10) for _, node := range tree.Tree { // Skip directories and files in irrelevant directories. if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) { continue } // Get files and check if directories have acceptable files. if d, f := path.Split(node.Path); IsDocFile(f) { // Check if file is in the directory that is corresponding to import path. if d == dirPrefix { files = append(files, &Source{ SrcName: f, BrowseUrl: com.Expand("github.com/golang/go/blob/master/{0}", nil, node.Path), RawSrcUrl: com.Expand("https://raw.github.com/golang/go/master/{0}?{1}", nil, node.Path, setting.GitHubCredentials), }) continue } // Otherwise, check if it's a direct sub-directory of import path. if len(strings.Split(d, "/"))-dirLevel == 1 { dirMap[d[dirLength:len(d)-1]] = true continue } } } dirs := base.MapToSortedStrings(dirMap) if len(files) == 0 && len(dirs) == 0 { return nil, ErrPackageNoGoFile } else if err := com.FetchFiles(Client, files, githubRawHeader); err != nil { return nil, fmt.Errorf("fetch files: %v", err) } // Start generating data. 
w := &Walker{ LineFmt: "#L%d", Pdoc: &Package{ PkgInfo: &models.PkgInfo{ ImportPath: importPath, ProjectPath: "github.com/golang/go", ViewDirPath: "github.com/golang/go/tree/master/src/" + importPath, Etag: commit, IsGoRepo: true, Subdirs: strings.Join(dirs, "|"), }, }, } srcs := make([]*Source, 0, len(files)) srcMap := make(map[string]*Source) for _, f := range files { s := f.(*Source) srcs = append(srcs, s) if !strings.HasSuffix(f.Name(), "_test.go") { srcMap[f.Name()] = s } } pdoc, err := w.Build(&WalkRes{ WalkDepth: WD_All, WalkType: WT_Memory, WalkMode: WM_All, Srcs: srcs, }) if err != nil { return nil, fmt.Errorf("walk package: %v", err) } return pdoc, nil }
func checkFileUpdates() { log.Debug("Checking file updates") type tree struct { AppName, ApiUrl, RawUrl, TreeName, Prefix string } trees := make([]*tree, len(setting.Apps)) for i, app := range setting.Apps { trees[i] = &tree{ AppName: app.Name, ApiUrl: "https://api.github.com/repos/" + app.RepoName + "/git/trees/master?recursive=1&" + setting.GithubCred, RawUrl: "https://raw.github.com/" + app.RepoName + "/master/", TreeName: "conf/docTree_" + app.Name + ".json", Prefix: "docs/" + app.Name + "/", } } for _, tree := range trees { var tmpTree struct { Tree []*oldDocNode } if err := com.HttpGetJSON(httpClient, tree.ApiUrl, &tmpTree); err != nil { log.Error("Fail to get trees: %v", err) return } var saveTree struct { Tree []*oldDocNode } saveTree.Tree = make([]*oldDocNode, 0, len(tmpTree.Tree)) // Compare SHA. files := make([]com.RawFile, 0, len(tmpTree.Tree)) for _, node := range tmpTree.Tree { // Skip non-md files and "README.md". if node.Type != "blob" || (!strings.HasSuffix(node.Path, ".md") && !strings.Contains(node.Path, "images") && !strings.HasSuffix(node.Path, ".json")) || strings.HasPrefix(strings.ToLower(node.Path), "readme") { continue } name := strings.TrimSuffix(node.Path, ".md") if checkSHA(tree.AppName, name, node.Sha, tree.Prefix) { log.Info("Need to update: %s", name) files = append(files, &rawFile{ name: name, rawURL: tree.RawUrl + node.Path, }) } saveTree.Tree = append(saveTree.Tree, &oldDocNode{ Path: name, Sha: node.Sha, }) // For save purpose, reset name. node.Path = name } // Fetch files. if err := com.FetchFiles(httpClient, files, nil); err != nil { log.Error("Fail to fetch files: %v", err) return } // Update data. 
for _, f := range files { os.MkdirAll(path.Join(tree.Prefix, path.Dir(f.Name())), os.ModePerm) suf := ".md" if strings.Contains(f.Name(), "images") || strings.HasSuffix(f.Name(), ".json") { suf = "" } fw, err := os.Create(tree.Prefix + f.Name() + suf) if err != nil { log.Error("Fail to open file: %v", err) continue } _, err = fw.Write(f.Data()) fw.Close() if err != nil { log.Error("Fail to write data: %v", err) continue } } // Save documentation information. f, err := os.Create(tree.TreeName) if err != nil { log.Error("Fail to save data: %v", err) return } e := json.NewEncoder(f) err = e.Encode(&saveTree) if err != nil { log.Error("Fail to encode data: %v", err) return } f.Close() } log.Debug("Finish check file updates") for _, app := range setting.Apps { parseDocs(app.Name) initDocMap(app.Name) } }
func checkDocUpdates() { beego.Trace("Checking documentation updates") var tmpTree struct { Tree []*docNode } err := com.HttpGetJSON(httpClient, "https://api.github.com/repos/beego/beedoc/git/trees/master?recursive=1&"+githubCred, &tmpTree) if err != nil { beego.Error("models.checkDocUpdates -> get trees:", err.Error()) return } // Compare SHA. files := make([]com.RawFile, 0, len(tmpTree.Tree)) for _, node := range tmpTree.Tree { // Skip non-md files and "README.MD". if !strings.HasSuffix(node.Path, ".md") || node.Path == "README.md" { continue } // Trim ".md". name := node.Path[:len(node.Path)-3] if checkSHA(name, node.Sha) { beego.Info("Need to update:", name) files = append(files, &rawFile{ name: name, rawURL: "https://raw.github.com/beego/beedoc/master/" + node.Path, }) } // For save purpose, reset name. node.Path = name } // Fetch files. if err := com.FetchFiles(httpClient, files, nil); err != nil { beego.Error("models.checkDocUpdates -> fetch files:", err.Error()) return } // Update data. for _, f := range files { fw, err := os.Create("docs/" + f.Name() + ".md") if err != nil { beego.Error("models.checkDocUpdates -> open file:", err.Error()) return } _, err = fw.Write(f.Data()) fw.Close() if err != nil { beego.Error("models.checkDocUpdates -> write data:", err.Error()) return } } beego.Trace("Finish check documentation updates") initDocMap() // Save documentation information. f, err := os.Create("conf/docTree.json") if err != nil { beego.Error("models.checkDocUpdates -> save data:", err.Error()) return } defer f.Close() e := json.NewEncoder(f) err = e.Encode(&tmpTree) if err != nil { beego.Error("models.checkDocUpdates -> encode data:", err.Error()) return } }
// getBitbucketDoc fetches and builds documentation for a package hosted on
// Bitbucket. match carries the URL template variables ({owner}, {repo},
// {dir}, {importPath}, ...) and is mutated in place with "vcs", "commit"
// and "tag" as they are discovered. tag selects a specific tag to build
// ("" means the default branch); savedEtag is the previously stored
// revision, used to short-circuit with errNotModified when nothing changed.
func getBitbucketDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {
	// Recover the VCS kind (git/hg) from the saved etag when possible,
	// otherwise ask the Bitbucket API for the repository's SCM.
	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> " + err.Error())
		}
		match["vcs"] = repo.Scm
	}

	// Get master commit.
	var branches map[string]struct {
		Node string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/branches", match), &branches); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get branches: " + err.Error())
	}
	match["commit"] = branches["default"].Node

	// Get all tags. Note: map iteration makes the tag order nondeterministic.
	tags := make([]string, 0, 5)
	var nodes map[string]struct {
		Node string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/tags", match), &nodes); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get nodes: " + err.Error())
	}
	for k := range nodes {
		tags = append(tags, k)
	}
	// Prepend the default branch name so it always appears first.
	if len(tags) > 0 {
		tags = append([]string{defaultTags[match["vcs"]]}, tags...)
	}

	// etag stays empty when a specific tag was requested; only default-branch
	// builds participate in the not-modified check.
	var etag string
	if len(tag) == 0 {
		// Check revision tag.
		etag = match["commit"]
		if etag == savedEtag {
			return nil, errNotModified
		}
		match["tag"] = defaultTags[match["vcs"]]
	} else {
		match["tag"] = tag
	}

	// Get files and directories.
	var node struct {
		Files []struct {
			Path string
		}
		Directories []string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &node); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Get source file data.
	files := make([]com.RawFile, 0, 5)
	for _, f := range node.Files {
		_, name := path.Split(f.Path)
		if utils.IsDocFile(name) {
			files = append(files, &hv.Source{
				SrcName:   name,
				BrowseUrl: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				RawSrcUrl: com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, len(node.Directories))
	for _, d := range node.Directories {
		if utils.FilterDirName(d) {
			dirs = append(dirs, d)
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := com.FetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#cl-%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}/", match),
				ViewDirPath: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}{dir}/", match),
				Tags:        strings.Join(tags, "|||"),
				Ptag:        etag,
				Vcs:         "BitBucket",
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	// srcMap keeps non-test sources by name for the Hv generation below.
	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)
		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	// Only default-branch builds of command packages get Hv output.
	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
func getGithubDoc(match map[string]string, etag string) (*Package, error) { match["cred"] = setting.GitHubCredentials // Check revision. commit, err := getGithubRevision(com.Expand("github.com/{owner}/{repo}", match)) if err != nil { return nil, fmt.Errorf("get revision: %v", err) } if commit == etag { return nil, ErrPackageNotModified } // Get files. var tree struct { Tree []struct { Url string Path string Type string } Url string } if err := com.HttpGetJSON(Client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/trees/master?recursive=1&{cred}", match), &tree); err != nil { return nil, fmt.Errorf("get tree: %v", err) } // Because Github API URLs are case-insensitive, we need to check that the // userRepo returned from Github matches the one that we are requesting. if !strings.HasPrefix(tree.Url, com.Expand("https://api.github.com/repos/{owner}/{repo}/", match)) { return nil, errors.New("GitHub import path has incorrect case") } // Get source file data and subdirectories. dirPrefix := match["dir"] if dirPrefix != "" { dirPrefix = dirPrefix[1:] + "/" } dirLevel := len(strings.Split(dirPrefix, "/")) dirLength := len(dirPrefix) dirMap := make(map[string]bool) files := make([]com.RawFile, 0, 10) for _, node := range tree.Tree { // Skip directories and files in wrong directories, get them later. if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) { continue } // Get files and check if directories have acceptable files. if d, f := path.Split(node.Path); IsDocFile(f) { // Check if file is in the directory that is corresponding to import path. if d == dirPrefix { files = append(files, &Source{ SrcName: f, BrowseUrl: com.Expand("github.com/{owner}/{repo}/blob/master/{0}", match, node.Path), RawSrcUrl: com.Expand("https://raw.github.com/{owner}/{repo}/master/{0}?{1}", match, node.Path, setting.GitHubCredentials), }) continue } // Otherwise, check if it's a direct sub-directory of import path. 
if len(strings.Split(d, "/"))-dirLevel == 1 { dirMap[d[dirLength:len(d)-1]] = true continue } } } dirs := base.MapToSortedStrings(dirMap) if len(files) == 0 && len(dirs) == 0 { return nil, ErrPackageNoGoFile } else if err := com.FetchFiles(Client, files, githubRawHeader); err != nil { return nil, fmt.Errorf("fetch files: %v", err) } // Start generating data. w := &Walker{ LineFmt: "#L%d", Pdoc: &Package{ PkgInfo: &models.PkgInfo{ ImportPath: match["importPath"], ProjectPath: com.Expand("github.com/{owner}/{repo}", match), ViewDirPath: com.Expand("github.com/{owner}/{repo}/tree/master/{importPath}", match), Etag: commit, Subdirs: strings.Join(dirs, "|"), }, }, } srcs := make([]*Source, 0, len(files)) srcMap := make(map[string]*Source) for _, f := range files { s, _ := f.(*Source) srcs = append(srcs, s) if !strings.HasSuffix(f.Name(), "_test.go") { srcMap[f.Name()] = s } } pdoc, err := w.Build(&WalkRes{ WalkDepth: WD_All, WalkType: WT_Memory, WalkMode: WM_All, Srcs: srcs, }) if err != nil { return nil, fmt.Errorf("error walking package: %v", err) } return pdoc, nil }
func checkFileUpdates() { beego.Trace("Checking file updates") type tree struct { ApiUrl, RawUrl, TreeName, Prefix string } var trees = []*tree{ { ApiUrl: "https://api.github.com/repos/beego/beedoc/git/trees/master?recursive=1&" + githubCred, RawUrl: "https://raw.github.com/beego/beedoc/master/", TreeName: "conf/docTree.json", Prefix: "docs/", }, { ApiUrl: "https://api.github.com/repos/beego/beeblog/git/trees/master?recursive=1&" + githubCred, RawUrl: "https://raw.github.com/beego/beeblog/master/", TreeName: "conf/blogTree.json", Prefix: "blog/", }, } for _, tree := range trees { var tmpTree struct { Tree []*docNode } err := com.HttpGetJSON(httpClient, tree.ApiUrl, &tmpTree) if err != nil { beego.Error("models.checkFileUpdates -> get trees:", err.Error()) return } var saveTree struct { Tree []*docNode } saveTree.Tree = make([]*docNode, 0, len(tmpTree.Tree)) // Compare SHA. files := make([]com.RawFile, 0, len(tmpTree.Tree)) for _, node := range tmpTree.Tree { // Skip non-md files and "README.md". if !strings.HasSuffix(node.Path, ".md") || node.Path == "README.md" { continue } // Trim ".md". name := node.Path[:len(node.Path)-3] if checkSHA(name, node.Sha, tree.Prefix) { beego.Info("Need to update:", name) files = append(files, &rawFile{ name: name, rawURL: tree.RawUrl + node.Path, }) } saveTree.Tree = append(saveTree.Tree, &docNode{ Path: name, Sha: node.Sha, }) // For save purpose, reset name. node.Path = name } // Fetch files. if err := com.FetchFiles(httpClient, files, nil); err != nil { beego.Error("models.checkFileUpdates -> fetch files:", err.Error()) return } // Update data. for _, f := range files { fw, err := os.Create(tree.Prefix + f.Name() + ".md") if err != nil { beego.Error("models.checkFileUpdates -> open file:", err.Error()) continue } _, err = fw.Write(f.Data()) fw.Close() if err != nil { beego.Error("models.checkFileUpdates -> write data:", err.Error()) continue } } // Save documentation information. 
f, err := os.Create(tree.TreeName) if err != nil { beego.Error("models.checkFileUpdates -> save data:", err.Error()) return } e := json.NewEncoder(f) err = e.Encode(&saveTree) if err != nil { beego.Error("models.checkFileUpdates -> encode data:", err.Error()) return } f.Close() } beego.Trace("Finish check file updates") initMaps() }
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) { match["cred"] = githubCred // Get master commit. var refs []*struct { Ref string Url string Object struct { Sha string Type string Url string } } err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs) if err != nil { if strings.HasPrefix(err.Error(), "Resource not found") { return nil, com.NotFoundError{"doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error()} } return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error()) } var commit string // Get all tags. tags := make([]string, 0, 5) for _, ref := range refs { switch { case strings.HasPrefix(ref.Ref, "refs/heads/master"): commit = ref.Object.Sha case strings.HasPrefix(ref.Ref, "refs/tags/"): tags = append(tags, ref.Ref[len("refs/tags/"):]) } } if len(tags) > 0 { tags = append([]string{"master"}, tags...) } if len(tag) == 0 { // Check revision tag. if commit == savedEtag { return nil, errNotModified } match["tag"] = "master" } else { match["tag"] = tag } // Get files. var tree struct { Tree []struct { Url string Path string Type string } Url string } err = com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree) if err != nil { return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get trees: " + err.Error()) } // Because Github API URLs are case-insensitive, we need to check that the // userRepo returned from Github matches the one that we are requesting. if !strings.HasPrefix(tree.Url, com.Expand("https://api.github.com/repos/{owner}/{repo}/", match)) { return nil, errors.New("Github import path has incorrect case") } // Get source file data and subdirectories. 
dirPrefix := match["dir"] if dirPrefix != "" { dirPrefix = dirPrefix[1:] + "/" } preLen := len(dirPrefix) isGoPro := false // Indicates whether it's a Go project. dirs := make([]string, 0, 5) files := make([]com.RawFile, 0, 5) for _, node := range tree.Tree { // Skip directories and files in wrong directories, get them later. if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) { continue } // Get files and check if directories have acceptable files. if d, f := path.Split(node.Path); utils.IsDocFile(f) && utils.FilterDirName(d) { // Check if it's a Go file. if !isGoPro && strings.HasSuffix(f, ".go") { isGoPro = true } // Check if file is in the directory that is corresponding to import path. if d == dirPrefix { // Yes. if !isGoPro && strings.HasSuffix(f, ".go") { isGoPro = true } files = append(files, &hv.Source{ SrcName: f, BrowseUrl: com.Expand("github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path), RawSrcUrl: com.Expand("https://raw.github.com/{owner}/{repo}/{tag}/{0}", match, node.Path) + "?" + githubCred, }) } else { sd, _ := path.Split(d[preLen:]) sd = strings.TrimSuffix(sd, "/") if !checkDir(sd, dirs) { dirs = append(dirs, sd) } } } } if !isGoPro { return nil, com.NotFoundError{"Cannot find Go files, it's not a Go project"} } if len(files) == 0 && len(dirs) == 0 { return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs"} } // Fetch file from VCS. if err := com.FetchFiles(client, files, githubRawHeader); err != nil { return nil, err } // Get addtional information: forks, watchers. 
var note struct { Homepage string Fork bool Parent struct { Html string `json:"html_url"` } Issues int `json:"open_issues_count"` Stars int `json:"watchers_count"` Forks int `json:"forks_count"` } err = com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}?{cred}", match), ¬e) if err != nil { return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get note: " + err.Error()) } // Start generating data. w := &hv.Walker{ LineFmt: "#L%d", Pdoc: &hv.Package{ PkgInfo: &hv.PkgInfo{ ImportPath: match["importPath"], ProjectName: match["repo"], ProjectPath: com.Expand("github.com/{owner}/{repo}/blob/{tag}", match), ViewDirPath: com.Expand("github.com/{owner}/{repo}/blob/{tag}{dir}", match), Tags: strings.Join(tags, "|||"), Ptag: commit, Vcs: "GitHub", Issues: note.Issues, Stars: note.Stars, Forks: note.Forks, }, PkgDecl: &hv.PkgDecl{ Tag: tag, Dirs: dirs, }, }, } if len(note.Homepage) > 0 { w.Pdoc.Homepage = note.Homepage } if note.Fork { w.Pdoc.ForkUrl = note.Parent.Html } srcs := make([]*hv.Source, 0, len(files)) srcMap := make(map[string]*hv.Source) for _, f := range files { s, _ := f.(*hv.Source) srcs = append(srcs, s) if !strings.HasSuffix(f.Name(), "_test.go") { srcMap[f.Name()] = s } } pdoc, err := w.Build(&hv.WalkRes{ WalkDepth: hv.WD_All, WalkType: hv.WT_Memory, WalkMode: hv.WM_All, Srcs: srcs, }) if err != nil { return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error()) } if len(tag) == 0 && w.Pdoc.IsCmd { err = generateHv(match["importPath"], srcMap) } return pdoc, err }