// generateHv renders highlighted source views for a package and saves each
// view as a document.write JS file.
func generateHv(importPath string, srcMap map[string]*hv.Source) error {
	w := &hv.Walker{
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath: importPath,
			},
		},
		SrcFiles: srcMap,
	}

	hvs, err := w.Render()
	if err != nil {
		return errors.New("doc.generateHv(" + importPath + ") -> Fail to render: " + err.Error())
	}

	buf := new(bytes.Buffer)
	for name, data := range hvs {
		filePath := "." + utils.HvJsPath + importPath + "/" + name + ".js"

		buf.Reset()
		buf.WriteString("document.write(\"")
		buf.Write(com.Html2JS(data))
		buf.WriteString("\")")

		if _, err := com.SaveFile(filePath, buf.Bytes()); err != nil {
			return errors.New("doc.generateHv(" + importPath + ") -> Save hv: " + err.Error())
		}
	}
	return nil
}
// SavePkgDoc saves rendered readme.md file data.
func SavePkgDoc(docPath string, readmes map[string][]byte) {
	for lang, data := range readmes {
		if len(data) == 0 {
			continue
		}

		if data[0] == '\n' {
			data = data[1:]
		}

		data = com.Html2JS(data)
		localeDocPath := DocsJsPath + docPath + "_RM_" + lang
		buf := new(bytes.Buffer)
		buf.WriteString("document.write(\"")
		buf.Write(data)
		buf.WriteString("\")")
		if _, err := com.SaveFile("."+localeDocPath+".js", buf.Bytes()); err != nil {
			beego.Error("utils.SavePkgDoc(", localeDocPath, ") ->", err)
		}
	}
}
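// Hypothetical usage sketch (not part of the original source): it illustrates
// the per-language readme map SavePkgDoc expects. The function name, package
// path, and language keys below are assumptions for illustration only.
func savePkgDocSketch() {
	readmes := map[string][]byte{
		"en": []byte("<h1>my-package</h1><p>English readme rendered to HTML.</p>"),
		"zh": []byte("<h1>my-package</h1><p>Chinese readme rendered to HTML.</p>"),
	}
	// Writes one JS file per language under "."+DocsJsPath,
	// named "<docPath>_RM_<lang>.js".
	SavePkgDoc("github.com/owner/repo", readmes)
}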
// getBitbucketDoc downloads tarball from bitbucket.org.
func getBitbucketDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check version control.
	if m := bitbucketEtagRe.FindStringSubmatch(nod.Value); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, err
		}
		match["vcs"] = repo.Scm
	}

	if nod.Type == BRANCH {
		if len(nod.Value) == 0 {
			match["commit"] = defaultTags[match["vcs"]]
		} else {
			match["commit"] = nod.Value
		}
	}

	if nod.IsGetDeps {
		if nod.Type == COMMIT {
			tags := make(map[string]string)
			for _, nodeType := range []string{"branches", "tags"} {
				var nodes map[string]struct {
					Node string
				}
				if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
					return nil, err
				}
				for t, n := range nodes {
					tags[t] = n.Node
				}
			}

			// Check revision tag.
			var err error
			match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
			if err != nil {
				return nil, err
			}

			nod.Value = match["commit"]
		}
	} else {
		// Check download type.
		switch nod.Type {
		case TAG, COMMIT, BRANCH:
			match["commit"] = nod.Value
		default:
			return nil, errors.New("Unknown node type: " + nod.Type)
		}
	}

	// We use .tar.gz here.
	// zip     : https://bitbucket.org/{owner}/{repo}/get/{commit}.zip
	// tarball : https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz", match), nil)
	if err != nil {
		return nil, err
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("bitbucket.org/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // Auto path is the root path generated by bitbucket.org.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// Skip directory entries; normally we should not encounter them here.
		if strings.HasSuffix(fn, "/") {
			continue
		}

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, "/")]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		// Create directory before creating file.
		dir := path.Dir(absPath)
		if !checkDir(dir, dirs) && !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
			dirs = append(dirs, dir)
			os.MkdirAll(dir+"/", os.ModePerm)
		}

		// Get data from archive.
		fbytes := make([]byte, h.Size)
		if _, err := io.ReadFull(tr, fbytes); err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}

		// Set modify time.
		os.Chtimes(absPath, h.AccessTime, h.ModTime)
	}

	var imports []string

	// Check imports only if dependencies were requested.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
// getGoogleDoc downloads raw files from code.google.com.
func getGoogleDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	setupGoogleMatch(match)
	// Check version control.
	if err := getGoogleVCS(client, match); err != nil {
		return nil, errors.New("fail to get vcs " + nod.ImportPath + " : " + err.Error())
	}

	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["tag"] = defaultTags[match["vcs"]]

			// Only get and check revision with the latest version.
			p, err := com.HttpGetBytes(client, com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/?r={tag}", match), nil)
			if err != nil {
				log.Error("GET", "Fail to get revision")
				log.Error("", err.Error())
				break
			}

			if m := googleRevisionRe.FindSubmatch(p); m == nil {
				// Note: err is nil here, so report the parse failure directly
				// instead of dereferencing it.
				log.Error("GET", "Fail to get revision")
				log.Error("", "Cannot find revision in page content")
			} else {
				etag := string(m[1])
				if etag == nod.Revision {
					log.Log("GET Package hasn't changed: %s", nod.ImportPath)
					return nil, nil
				}
				nod.Revision = etag
			}
		} else {
			match["tag"] = nod.Value
		}
	case TAG, COMMIT:
		match["tag"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	var installPath string
	projectPath := GetProjectPath(nod.ImportPath)
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		installPath = installRepoPath + "/" + projectPath + suf
	} else {
		installPath = installRepoPath + "/" + projectPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	if match["vcs"] == "svn" {
		com.ColorLog("[WARN] SVN detected, may take very long time.\n")

		rootPath := com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}", match)
		d, f := path.Split(rootPath)
		err := downloadFiles(client, match, d, installPath+"/", match["tag"], []string{f + "/"})
		if err != nil {
			return nil, errors.New("Fail to download " + nod.ImportPath + " : " + err.Error())
		}
	}

	p, err := com.HttpGetBytes(client, com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/archive/{tag}.zip", match), nil)
	if err != nil {
		return nil, errors.New("Fail to download " + nod.ImportPath + " : " + err.Error())
	}

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New(nod.ImportPath + " -> new zip: " + err.Error())
	}

	nameLen := strings.Index(r.File[0].Name, "/")
	dirPrefix := match["dir"]
	if len(dirPrefix) != 0 {
		dirPrefix = dirPrefix[1:] + "/"
	}

	dirs := make([]string, 0, 5)
	for _, f := range r.File {
		absPath := strings.Replace(f.Name, f.Name[:nameLen], installPath, 1)

		// Create directory before creating file.
		dir := path.Dir(absPath)
		if !checkDir(dir, dirs) && !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
			dirs = append(dirs, dir+"/")
			os.MkdirAll(dir+"/", os.ModePerm)
		}

		// Get file from archive.
		rc, err := f.Open()
		if err != nil {
			return nil, err
		}

		fbytes := make([]byte, f.FileInfo().Size())
		_, err = io.ReadFull(rc, fbytes)
		if err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}
	}

	var imports []string

	// Check imports only if dependencies were requested.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
// SaveDocPage saves documentation content to JS file(s);
// it returns the max index of the JS file(s),
// or -1 when an error occurs.
func SaveDocPage(docPath string, data []byte) int {
	data = com.Html2JS(data)
	docPath = DocsJsPath + docPath

	buf := new(bytes.Buffer)
	count := 0
	d := string(data)
	l := len(d)
	if l < 80000 {
		buf.WriteString("document.write(\"")
		buf.Write(data)
		buf.WriteString("\")")

		if _, err := com.SaveFile("."+docPath+".js", buf.Bytes()); err != nil {
			beego.Error("utils.SaveDocPage(", docPath, ") ->", err)
			return -1
		}
	} else {
		// Too large, need to separate into multiple files.
		start := 0
		end := start + 40000
		for {
			if end >= l {
				end = l
			} else {
				// Need to break at a closing tag boundary ("/b>").
				for {
					if d[end-3:end] == "/b>" {
						break
					}
					end += 1

					if end >= l {
						break
					}
				}
			}

			buf.WriteString("document.write(\"")
			buf.WriteString(d[start:end])
			buf.WriteString("\")\n")

			p := docPath
			if count != 0 {
				p += fmt.Sprintf("-%d", count)
			}

			if _, err := com.SaveFile("."+p+".js", buf.Bytes()); err != nil {
				beego.Error("utils.SaveDocPage(", p, ") ->", err)
				return -1
			}

			if end >= l {
				break
			}

			buf.Reset()
			start = end
			end += 204800
			count++
		}
	}

	return count
}
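// Hypothetical usage sketch (not part of the original source): it shows how a
// caller might turn SaveDocPage's return value into the list of generated
// script paths ("<docPath>.js", "<docPath>-1.js", ...). The function name
// renderDocScripts is an assumption for illustration only.
func renderDocScripts(docPath string, page []byte) []string {
	maxIdx := SaveDocPage(docPath, page)
	if maxIdx == -1 {
		// Saving failed; nothing to include on the page.
		return nil
	}

	// First chunk has no numeric suffix; later chunks are suffixed "-1", "-2", ...
	srcs := []string{DocsJsPath + docPath + ".js"}
	for i := 1; i <= maxIdx; i++ {
		srcs = append(srcs, fmt.Sprintf("%s%s-%d.js", DocsJsPath, docPath, i))
	}
	return srcs
}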
func ExampleSaveFile() {
	s := "ExampleSaveFile"
	n, err := com.SaveFile("SaveFile.txt", []byte(s))
	fmt.Println(n, err)
}
// getGithubDoc downloads tarball from github.com.
func getGithubDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER

			// Only get and check revision with the latest version.
			var refs []*struct {
				Ref    string
				Url    string
				Object struct {
					Sha  string
					Type string
					Url  string
				}
			}
			err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
			if err != nil {
				log.Error("GET", "Fail to get revision")
				log.Error("", err.Error())
				break
			}

			var etag string
		COMMIT_LOOP:
			for _, ref := range refs {
				switch {
				case strings.HasPrefix(ref.Ref, "refs/heads/master"):
					etag = ref.Object.Sha
					break COMMIT_LOOP
				}
			}
			if etag == nod.Revision {
				log.Log("GET Package hasn't changed: %s", nod.ImportPath)
				return nil, nil
			}
			nod.Revision = etag
		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// We use .zip here.
	// zip:     https://github.com/{owner}/{repo}/archive/{sha}.zip
	// tarball: https://github.com/{owner}/{repo}/tarball/{sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://github.com/{owner}/{repo}/archive/{sha}.zip", match), nil)
	if err != nil {
		return nil, errors.New("Fail to download Github repo -> " + err.Error())
	}

	shaName := com.Expand("{repo}-{sha}", match)
	if nod.Type == "tag" {
		shaName = strings.Replace(shaName, "-v", "-", 1)
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("github.com/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New(nod.ImportPath + " -> new zip: " + err.Error())
	}

	dirs := make([]string, 0, 5)
	// Need to add root path because we cannot get it from the archive.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		absPath := strings.Replace(f.Name, shaName, installPath, 1)
		// Create directory before creating file.
		os.MkdirAll(path.Dir(absPath)+"/", os.ModePerm)

	compareDir:
		switch {
		case strings.HasSuffix(absPath, "/"): // Directory.
			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				for _, d := range dirs {
					if d == absPath {
						break compareDir
					}
				}
				dirs = append(dirs, absPath)
			}
		default:
			// Get file from archive.
			rc, err := f.Open()
			if err != nil {
				return nil, err
			}

			fbytes := make([]byte, f.FileInfo().Size())
			_, err = io.ReadFull(rc, fbytes)
			if err != nil {
				return nil, err
			}

			_, err = com.SaveFile(absPath, fbytes)
			if err != nil {
				return nil, err
			}

			// Set modify time.
			os.Chtimes(absPath, f.ModTime(), f.ModTime())
		}
	}

	var imports []string

	// Check imports only if dependencies were requested.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
// getLaunchpadDoc downloads tarball from launchpad.net.
func getLaunchpadDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	if match["project"] != "" && match["series"] != "" {
		rc, err := com.HttpGet(client, com.Expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		_, isNotFound := err.(com.NotFoundError)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound:
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = com.Expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	var downloadPath string
	// Check if download with specific revision.
	if len(nod.Value) == 0 {
		downloadPath = com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match)
	} else {
		downloadPath = com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball/"+nod.Value, match)
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := com.HttpGetBytes(client, downloadPath, nil)
	if err != nil {
		return nil, err
	}

	installPath := installRepoPath + "/" + nod.ImportPath

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // Auto path is the root path generated by launchpad.net.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, match["repo"])+len(match["repo"])]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		switch {
		case h.FileInfo().IsDir(): // Directory.
			// Create directory before creating file.
			os.MkdirAll(absPath+"/", os.ModePerm)

			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				dirs = append(dirs, absPath)
			}
		case !strings.HasPrefix(fn, "."):
			// Get data from archive.
			fbytes := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, fbytes); err != nil {
				return nil, err
			}

			_, err = com.SaveFile(absPath, fbytes)
			if err != nil {
				return nil, err
			}
		}
	}

	var imports []string

	// Check imports only if dependencies were requested.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
func runUpdate(ctx *cli.Context) {
	setup(ctx)

	isAnythingUpdated := false
	// Load local version info.
	localVerInfo := loadLocalVerInfo()

	// Get remote version info.
	var remoteVerInfo version
	if err := com.HttpGetJSON(http.DefaultClient, "http://gopm.io/VERSION.json", &remoteVerInfo); err != nil {
		log.Error("Update", "Fail to fetch VERSION.json")
		log.Fatal("", err.Error())
	}

	// Package name list.
	if remoteVerInfo.PackageNameList > localVerInfo.PackageNameList {
		log.Log("Updating pkgname.list...%v > %v", localVerInfo.PackageNameList, remoteVerInfo.PackageNameList)
		data, err := com.HttpGetBytes(http.DefaultClient, "https://raw2.github.com/gpmgo/docs/master/pkgname.list", nil)
		if err != nil {
			log.Error("Update", "Fail to update pkgname.list")
			log.Fatal("", err.Error())
		}

		_, err = com.SaveFile(path.Join(doc.HomeDir, doc.PKG_NAME_LIST_PATH), data)
		if err != nil {
			log.Error("Update", "Fail to save pkgname.list")
			log.Fatal("", err.Error())
		}
		log.Log("Update pkgname.list to %v succeed!", remoteVerInfo.PackageNameList)
		isAnythingUpdated = true
	}

	// Gopm.
	if remoteVerInfo.Gopm > localVerInfo.Gopm {
		log.Log("Updating gopm...%v > %v", localVerInfo.Gopm, remoteVerInfo.Gopm)
		installRepoPath = doc.HomeDir + "/repos"

		tmpDirPath := filepath.Join(doc.HomeDir, "temp")
		tmpBinPath := filepath.Join(tmpDirPath, "gopm")
		if runtime.GOOS == "windows" {
			tmpBinPath += ".exe"
		}

		os.MkdirAll(tmpDirPath, os.ModePerm)
		os.Remove(tmpBinPath)

		// Fetch code.
		args := []string{"bin", "-u", "-d"}
		if ctx.Bool("verbose") {
			args = append(args, "-v")
		}
		args = append(args, []string{"github.com/gpmgo/gopm", tmpDirPath}...)

		stdout, stderr, err := com.ExecCmd("gopm", args...)
		if err != nil {
			log.Error("Update", "Fail to execute 'gopm bin -u -d github.com/gpmgo/gopm "+tmpDirPath+"'")
			log.Fatal("", err.Error())
		}
		if len(stderr) > 0 {
			fmt.Print(stderr)
		}
		if len(stdout) > 0 {
			fmt.Print(stdout)
		}

		// Check if previous steps were successful.
		if !com.IsExist(tmpBinPath) {
			log.Error("Update", "Fail to continue command")
			log.Fatal("", "Previous steps weren't successful, no binary produced")
		}

		movePath := exePath()
		log.Log("New binary will replace %s", movePath)

		// Move binary to given directory.
		if runtime.GOOS != "windows" {
			err := os.Rename(tmpBinPath, movePath)
			if err != nil {
				log.Error("Update", "Fail to move binary")
				log.Fatal("", err.Error())
			}
			os.Chmod(movePath+"/"+path.Base(tmpBinPath), os.ModePerm)
		} else {
			batPath := filepath.Join(tmpDirPath, "update.bat")
			f, err := os.Create(batPath)
			if err != nil {
				log.Error("Update", "Fail to generate bat file")
				log.Fatal("", err.Error())
			}
			f.WriteString("@echo off\r\n")
			f.WriteString(fmt.Sprintf("ping -n 1 127.0.0.1>nul\r\ncopy \"%v\" \"%v\" >nul\r\ndel \"%v\" >nul\r\n\r\n", tmpBinPath, movePath, tmpBinPath))
			//f.WriteString(fmt.Sprintf("del \"%v\"\r\n", batPath))
			f.Close()

			attr := &os.ProcAttr{
				Dir:   workDir,
				Env:   os.Environ(),
				Files: []*os.File{os.Stdin, os.Stdout, os.Stderr},
			}

			_, err = os.StartProcess(batPath, []string{batPath}, attr)
			if err != nil {
				log.Error("Update", "Fail to start bat process")
				log.Fatal("", err.Error())
			}
		}

		log.Success("SUCC", "Update", "Command executed successfully!")
		isAnythingUpdated = true
	}

	// Save JSON.
	f, err := os.Create(path.Join(doc.HomeDir, doc.VER_PATH))
	if err != nil {
		log.Error("Update", "Fail to create VERSION.json")
		log.Fatal("", err.Error())
	}
	if err := json.NewEncoder(f).Encode(&remoteVerInfo); err != nil {
		log.Error("Update", "Fail to encode VERSION.json")
		log.Fatal("", err.Error())
	}

	if !isAnythingUpdated {
		log.Log("Nothing needs to be updated")
	}
	log.Log("Exit old gopm")
}
// getOSCDoc downloads zip archive from git.oschina.net.
func getOSCDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER
		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// zip: http://{projectRoot}/repository/archive?ref={sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("http://git.oschina.net/{owner}/{repo}/repository/archive?ref={sha}", match), nil)
	if err != nil {
		return nil, errors.New("Fail to download OSChina repo -> " + err.Error())
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("git.oschina.net/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New("Fail to unzip OSChina repo -> " + err.Error())
	}

	nameLen := len(match["repo"])
	dirs := make([]string, 0, 5)
	// Need to add root path because we cannot get it from the archive.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		fileName := f.Name[nameLen+1:]
		absPath := installPath + "/" + fileName

		if strings.HasSuffix(absPath, "/") {
			dirs = append(dirs, absPath)
			os.MkdirAll(absPath, os.ModePerm)
			continue
		}

		// Get file from archive.
		rc, err := f.Open()
		if err != nil {
			return nil, errors.New("Fail to open OSChina repo -> " + err.Error())
		}

		fbytes := make([]byte, f.FileInfo().Size())
		_, err = io.ReadFull(rc, fbytes)
		if err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}
	}

	var imports []string

	// Check imports only if dependencies were requested.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}