Example #1
func TestGetJob(t *testing.T) {
	pathURL := fmt.Sprintf("%s/api/jobs", ts.URL)
	jsonErr := models.JSONError{}
	err := com.HttpGetJSON(client, fmt.Sprintf("%s/1000", pathURL), &jsonErr)
	if err == nil {
		t.Error("expected resource not found")
	}
	job := &models.Job{
		Title:       "my job",
		Description: "whacko job",
	}
	q := db.Conn.Create(job)
	if q.Error != nil {
		t.Error(q.Error)
	}

	rst := &models.Job{}
	err = com.HttpGetJSON(client, fmt.Sprintf("%s/%d", pathURL, job.ID), rst)
	if err != nil {
		t.Errorf("getting record %v", err)
	}
	if rst.Title != job.Title {
		t.Errorf("expected %s got %s", job.Title, rst.Title)
	}
	if rst.ID != job.ID {
		t.Errorf("expected %d got %d", job.ID, rst.ID)
	}
}
Example #2
func TestGetIndex(t *testing.T) {
	data := []struct {
		title string
	}{
		{"geernest"},
		{"mwanza"},
		{"Tanzania"},
	}

	// create all the records.
	for _, v := range data {
		j := models.Job{Title: v.title}
		q := db.Conn.Create(&j)

		if q.Error != nil {
			t.Errorf("creating a job record %v", q.Error)
		}
	}

	pathURL := fmt.Sprintf("%s/api/jobs", ts.URL)
	rst := []*models.Job{}
	err := com.HttpGetJSON(client, pathURL, &rst)
	if err != nil {
		t.Errorf("retrieving jobs index %v", err)
	}
	if len(rst) < len(data) {
		t.Errorf("expected %d got %d", len(data), len(rst))
	}

	// Verify the order is descending (most recently created record first).
	if rst[0].Title != data[2].title {
		t.Errorf("expected %s got %s", data[2].title, rst[0].Title)
	}
}
Example #3
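// NewSearch queries the package search API with the given keyword and decodes the result.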
func NewSearch(key string) (*SearchPackages, error) {
	url := com.Expand(searchApi, map[string]string{
		"keyword": key,
	})
	packages := new(SearchPackages)
	err := com.HttpGetJSON(httpClient, url, packages)
	return packages, err
}
Example #4
// https://gowalker.org/api/v1/pkginfo?pkgname=github.com/astaxie/beego
func GetPkgInfo(pkgname string) (*PackageItem, error) {
	err := RefreshPkg(pkgname)
	if err != nil {
		return nil, err
	}
	url := com.Expand(pkginfoApi, map[string]string{
		"pkgname": pkgname,
	})
	pkginfo := new(PackageItem)
	if err = com.HttpGetJSON(httpClient, url, pkginfo); err != nil {
		return nil, err
	}
	if pkginfo.Id == 0 {
		return nil, ErrPkgNotExists
	}
	return pkginfo, err
}
Example #5
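// getBitbucketRevision resolves the latest revision of a bitbucket.org repository and records it on the node.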
func getBitbucketRevision(client *http.Client, n *Node) error {
	if len(n.Value) == 0 {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, fmt.Sprintf("https://api.bitbucket.org/1.0/repositories/%s", strings.TrimPrefix(n.ImportPath, "bitbucket.org/")), &repo); err != nil {
			return fmt.Errorf("fail to fetch page: %v", err)
		}
		n.Value = defaultTags[repo.Scm]
	}
	data, err := com.HttpGetBytes(client, fmt.Sprintf("https://%s/commits/%s", n.ImportPath, n.Value), nil)
	if err != nil {
		return fmt.Errorf("fail to get revision(%s): %v", n.ImportPath, err)
	}
	m := bitbucketRevisionPattern.FindSubmatch(data)
	if m == nil {
		return fmt.Errorf("cannot find revision in page: %s", n.ImportPath)
	}
	n.Revision = strings.TrimPrefix(string(m[0]), `data-revision="`)
	n.ArchivePath = path.Join(setting.ArchivePath, n.ImportPath, n.Revision+".zip")
	return nil
}
Example #6
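// checkFileUpdates compares the GitHub doc trees of all configured apps against the local ones, fetches changed files, and rebuilds the doc maps.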
func checkFileUpdates() {
	log.Debug("Checking file updates")

	type tree struct {
		AppName, ApiUrl, RawUrl, TreeName, Prefix string
	}

	trees := make([]*tree, len(setting.Apps))
	for i, app := range setting.Apps {
		trees[i] = &tree{
			AppName:  app.Name,
			ApiUrl:   "https://api.github.com/repos/" + app.RepoName + "/git/trees/master?recursive=1&" + setting.GithubCred,
			RawUrl:   "https://raw.github.com/" + app.RepoName + "/master/",
			TreeName: "conf/docTree_" + app.Name + ".json",
			Prefix:   "docs/" + app.Name + "/",
		}
	}

	for _, tree := range trees {
		var tmpTree struct {
			Tree []*oldDocNode
		}

		if err := com.HttpGetJSON(httpClient, tree.ApiUrl, &tmpTree); err != nil {
			log.Error("Fail to get trees: %v", err)
			return
		}

		var saveTree struct {
			Tree []*oldDocNode
		}
		saveTree.Tree = make([]*oldDocNode, 0, len(tmpTree.Tree))

		// Compare SHA.
		files := make([]com.RawFile, 0, len(tmpTree.Tree))
		for _, node := range tmpTree.Tree {
			// Skip non-blob entries, files other than .md/.json/images, and README files.
			if node.Type != "blob" || (!strings.HasSuffix(node.Path, ".md") &&
				!strings.Contains(node.Path, "images") &&
				!strings.HasSuffix(node.Path, ".json")) ||
				strings.HasPrefix(strings.ToLower(node.Path), "readme") {
				continue
			}

			name := strings.TrimSuffix(node.Path, ".md")

			if checkSHA(tree.AppName, name, node.Sha, tree.Prefix) {
				log.Info("Need to update: %s", name)
				files = append(files, &rawFile{
					name:   name,
					rawURL: tree.RawUrl + node.Path,
				})
			}

			saveTree.Tree = append(saveTree.Tree, &oldDocNode{
				Path: name,
				Sha:  node.Sha,
			})
			// Reset the path to the trimmed name before saving.
			node.Path = name
		}

		// Fetch files.
		if err := com.FetchFiles(httpClient, files, nil); err != nil {
			log.Error("Fail to fetch files: %v", err)
			return
		}

		// Update data.
		for _, f := range files {
			os.MkdirAll(path.Join(tree.Prefix, path.Dir(f.Name())), os.ModePerm)
			suf := ".md"
			if strings.Contains(f.Name(), "images") ||
				strings.HasSuffix(f.Name(), ".json") {
				suf = ""
			}
			fw, err := os.Create(tree.Prefix + f.Name() + suf)
			if err != nil {
				log.Error("Fail to open file: %v", err)
				continue
			}

			_, err = fw.Write(f.Data())
			fw.Close()
			if err != nil {
				log.Error("Fail to write data: %v", err)
				continue
			}
		}

		// Save documentation information.
		f, err := os.Create(tree.TreeName)
		if err != nil {
			log.Error("Fail to save data: %v", err)
			return
		}

		e := json.NewEncoder(f)
		err = e.Encode(&saveTree)
		if err != nil {
			log.Error("Fail to encode data: %v", err)
			return
		}
		f.Close()
	}

	log.Debug("Finish check file updates")
	for _, app := range setting.Apps {
		parseDocs(app.Name)
		initDocMap(app.Name)
	}
}
Example #7
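// checkDocUpdates fetches updated Markdown files from the beego/beedoc repository and refreshes the local doc tree.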
func checkDocUpdates() {
	beego.Trace("Checking documentation updates")

	var tmpTree struct {
		Tree []*docNode
	}
	err := com.HttpGetJSON(httpClient, "https://api.github.com/repos/beego/beedoc/git/trees/master?recursive=1&"+githubCred, &tmpTree)
	if err != nil {
		beego.Error("models.checkDocUpdates -> get trees:", err.Error())
		return
	}

	// Compare SHA.
	files := make([]com.RawFile, 0, len(tmpTree.Tree))
	for _, node := range tmpTree.Tree {
		// Skip non-md files and "README.md".
		if !strings.HasSuffix(node.Path, ".md") || node.Path == "README.md" {
			continue
		}

		// Trim ".md".
		name := node.Path[:len(node.Path)-3]
		if checkSHA(name, node.Sha) {
			beego.Info("Need to update:", name)
			files = append(files, &rawFile{
				name:   name,
				rawURL: "https://raw.github.com/beego/beedoc/master/" + node.Path,
			})
		}

		// Reset the path to the trimmed name before saving.
		node.Path = name
	}

	// Fetch files.
	if err := com.FetchFiles(httpClient, files, nil); err != nil {
		beego.Error("models.checkDocUpdates -> fetch files:", err.Error())
		return
	}

	// Update data.
	for _, f := range files {
		fw, err := os.Create("docs/" + f.Name() + ".md")
		if err != nil {
			beego.Error("models.checkDocUpdates -> open file:", err.Error())
			return
		}

		_, err = fw.Write(f.Data())
		fw.Close()
		if err != nil {
			beego.Error("models.checkDocUpdates -> write data:", err.Error())
			return
		}
	}

	beego.Trace("Finish check documentation updates")
	initDocMap()

	// Save documentation information.
	f, err := os.Create("conf/docTree.json")
	if err != nil {
		beego.Error("models.checkDocUpdates -> save data:", err.Error())
		return
	}
	defer f.Close()

	e := json.NewEncoder(f)
	err = e.Encode(&tmpTree)
	if err != nil {
		beego.Error("models.checkDocUpdates -> encode data:", err.Error())
		return
	}
}
Example #8
// getBitbucketDoc downloads tarball from bitbucket.org.
func getBitbucketDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	// Check version control.
	if m := bitbucketEtagRe.FindStringSubmatch(nod.Value); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, err
		}
		match["vcs"] = repo.Scm
	}

	if nod.Type == BRANCH {
		if len(nod.Value) == 0 {
			match["commit"] = defaultTags[match["vcs"]]
		} else {
			match["commit"] = nod.Value
		}
	}

	if nod.IsGetDeps {
		if nod.Type == COMMIT {
			tags := make(map[string]string)
			for _, nodeType := range []string{"branches", "tags"} {
				var nodes map[string]struct {
					Node string
				}
				if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
					return nil, err
				}
				for t, n := range nodes {
					tags[t] = n.Node
				}
			}

			// Check revision tag.
			var err error
			match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
			if err != nil {
				return nil, err
			}

			nod.Value = match["commit"]
		}
	} else {
		// Check download type.
		switch nod.Type {
		case TAG, COMMIT, BRANCH:
			match["commit"] = nod.Value
		default:
			return nil, errors.New("Unknown node type: " + nod.Type)
		}
	}

	// We use .tar.gz here.
	// zip : https://bitbucket.org/{owner}/{repo}/get/{commit}.zip
	// tarball : https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz", match), nil)
	if err != nil {
		return nil, err
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("bitbucket.org/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var autoPath string // autoPath is the root directory name generated by bitbucket.org.
	// Get source file data.
	dirs := make([]string, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		fn := h.Name

		// Skip directory entries; the archive normally should not contain any.
		if strings.HasSuffix(fn, "/") {
			continue
		}

		// Check root path.
		if len(autoPath) == 0 {
			autoPath = fn[:strings.Index(fn, "/")]
		}
		absPath := strings.Replace(fn, autoPath, installPath, 1)

		// Create the directory before creating the file.
		dir := path.Dir(absPath)
		if !checkDir(dir, dirs) && !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
			dirs = append(dirs, dir)
			os.MkdirAll(dir+"/", os.ModePerm)
		}

		// Get data from archive.
		fbytes := make([]byte, h.Size)
		if _, err := io.ReadFull(tr, fbytes); err != nil {
			return nil, err
		}

		_, err = com.SaveFile(absPath, fbytes)
		if err != nil {
			return nil, err
		}

		// Set modify time.
		os.Chtimes(absPath, h.AccessTime, h.ModTime)
	}

	var imports []string

	// Check imports if dependencies need to be resolved.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d+"/", match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}

	return imports, err
}
Example #9
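// getBitbucketDoc fetches source files of a bitbucket.org repository and builds its package documentation.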
func getBitbucketDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {

	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> " + err.Error())
		}
		match["vcs"] = repo.Scm
	}

	// Get master commit.
	var branches map[string]struct {
		Node string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/branches", match), &branches); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get branches: " + err.Error())
	}
	match["commit"] = branches["default"].Node

	// Get all tags.
	tags := make([]string, 0, 5)
	var nodes map[string]struct {
		Node string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/tags", match), &nodes); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get nodes: " + err.Error())
	}
	for k := range nodes {
		tags = append(tags, k)
	}
	if len(tags) > 0 {
		tags = append([]string{defaultTags[match["vcs"]]}, tags...)
	}

	var etag string
	if len(tag) == 0 {
		// Check revision tag.
		etag = match["commit"]
		if etag == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = defaultTags[match["vcs"]]
	} else {
		match["tag"] = tag
	}

	// Get files and directories.
	var node struct {
		Files []struct {
			Path string
		}
		Directories []string
	}

	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &node); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Get source file data.
	files := make([]com.RawFile, 0, 5)
	for _, f := range node.Files {
		_, name := path.Split(f.Path)
		if utils.IsDocFile(name) {
			files = append(files, &hv.Source{
				SrcName:   name,
				BrowseUrl: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				RawSrcUrl: com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, len(node.Directories))
	for _, d := range node.Directories {
		if utils.FilterDirName(d) {
			dirs = append(dirs, d)
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := com.FetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#cl-%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}/", match),
				ViewDirPath: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}{dir}/", match),
				Tags:        strings.Join(tags, "|||"),
				Ptag:        etag,
				Vcs:         "BitBucket",
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
Example #10
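// getGithubDoc fetches the repository tree from the GitHub API and builds the package documentation.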
func getGithubDoc(match map[string]string, etag string) (*Package, error) {
	match["cred"] = setting.GitHubCredentials

	// Check revision.
	commit, err := getGithubRevision(com.Expand("github.com/{owner}/{repo}", match))
	if err != nil {
		return nil, fmt.Errorf("get revision: %v", err)
	}
	if commit == etag {
		return nil, ErrPackageNotModified
	}

	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	if err := com.HttpGetJSON(Client,
		com.Expand("https://api.github.com/repos/{owner}/{repo}/git/trees/master?recursive=1&{cred}", match), &tree); err != nil {
		return nil, fmt.Errorf("get tree: %v", err)
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, com.Expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, errors.New("GitHub import path has incorrect case")
	}

	// Get source file data and subdirectories.
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	dirLevel := len(strings.Split(dirPrefix, "/"))
	dirLength := len(dirPrefix)
	dirMap := make(map[string]bool)
	files := make([]com.RawFile, 0, 10)

	for _, node := range tree.Tree {
		// Skip directories and files in the wrong directories; they are handled later.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); IsDocFile(f) {
			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				files = append(files, &Source{
					SrcName:   f,
					BrowseUrl: com.Expand("github.com/{owner}/{repo}/blob/master/{0}", match, node.Path),
					RawSrcUrl: com.Expand("https://raw.github.com/{owner}/{repo}/master/{0}?{1}", match, node.Path, setting.GitHubCredentials),
				})
				continue
			}

			// Otherwise, check if it's a direct sub-directory of import path.
			if len(strings.Split(d, "/"))-dirLevel == 1 {
				dirMap[d[dirLength:len(d)-1]] = true
				continue
			}
		}
	}

	dirs := base.MapToSortedStrings(dirMap)

	if len(files) == 0 && len(dirs) == 0 {
		return nil, ErrPackageNoGoFile
	} else if err := com.FetchFiles(Client, files, githubRawHeader); err != nil {
		return nil, fmt.Errorf("fetch files: %v", err)
	}

	// Start generating data.
	w := &Walker{
		LineFmt: "#L%d",
		Pdoc: &Package{
			PkgInfo: &models.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectPath: com.Expand("github.com/{owner}/{repo}", match),
				ViewDirPath: com.Expand("github.com/{owner}/{repo}/tree/master/{importPath}", match),
				Etag:        commit,
				Subdirs:     strings.Join(dirs, "|"),
			},
		},
	}

	srcs := make([]*Source, 0, len(files))
	srcMap := make(map[string]*Source)
	for _, f := range files {
		s, _ := f.(*Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&WalkRes{
		WalkDepth: WD_All,
		WalkType:  WT_Memory,
		WalkMode:  WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, fmt.Errorf("error walking package: %v", err)
	}

	return pdoc, nil
}
Example #11
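// checkFileUpdates synchronizes documentation and blog files from the beego repositories and saves the local doc trees.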
func checkFileUpdates() {
	beego.Trace("Checking file updates")

	type tree struct {
		ApiUrl, RawUrl, TreeName, Prefix string
	}

	var trees = []*tree{
		{
			ApiUrl:   "https://api.github.com/repos/beego/beedoc/git/trees/master?recursive=1&" + githubCred,
			RawUrl:   "https://raw.github.com/beego/beedoc/master/",
			TreeName: "conf/docTree.json",
			Prefix:   "docs/",
		},
		{
			ApiUrl:   "https://api.github.com/repos/beego/beeblog/git/trees/master?recursive=1&" + githubCred,
			RawUrl:   "https://raw.github.com/beego/beeblog/master/",
			TreeName: "conf/blogTree.json",
			Prefix:   "blog/",
		},
	}

	for _, tree := range trees {
		var tmpTree struct {
			Tree []*docNode
		}

		err := com.HttpGetJSON(httpClient, tree.ApiUrl, &tmpTree)
		if err != nil {
			beego.Error("models.checkFileUpdates -> get trees:", err.Error())
			return
		}

		var saveTree struct {
			Tree []*docNode
		}
		saveTree.Tree = make([]*docNode, 0, len(tmpTree.Tree))

		// Compare SHA.
		files := make([]com.RawFile, 0, len(tmpTree.Tree))
		for _, node := range tmpTree.Tree {
			// Skip non-md files and "README.md".
			if !strings.HasSuffix(node.Path, ".md") || node.Path == "README.md" {
				continue
			}

			// Trim ".md".
			name := node.Path[:len(node.Path)-3]
			if checkSHA(name, node.Sha, tree.Prefix) {
				beego.Info("Need to update:", name)
				files = append(files, &rawFile{
					name:   name,
					rawURL: tree.RawUrl + node.Path,
				})
			}

			saveTree.Tree = append(saveTree.Tree, &docNode{
				Path: name,
				Sha:  node.Sha,
			})
			// Reset the path to the trimmed name before saving.
			node.Path = name
		}

		// Fetch files.
		if err := com.FetchFiles(httpClient, files, nil); err != nil {
			beego.Error("models.checkFileUpdates -> fetch files:", err.Error())
			return
		}

		// Update data.
		for _, f := range files {
			fw, err := os.Create(tree.Prefix + f.Name() + ".md")
			if err != nil {
				beego.Error("models.checkFileUpdates -> open file:", err.Error())
				continue
			}

			_, err = fw.Write(f.Data())
			fw.Close()
			if err != nil {
				beego.Error("models.checkFileUpdates -> write data:", err.Error())
				continue
			}
		}

		// Save documentation information.
		f, err := os.Create(tree.TreeName)
		if err != nil {
			beego.Error("models.checkFileUpdates -> save data:", err.Error())
			return
		}

		e := json.NewEncoder(f)
		err = e.Encode(&saveTree)
		if err != nil {
			beego.Error("models.checkFileUpdates -> encode data:", err.Error())
			return
		}
		f.Close()
	}

	beego.Trace("Finish check file updates")
	initMaps()
}
Example #12
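// runUpdate checks gopm.io for a newer pkgname.list and gopm binary and updates them when available.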
func runUpdate(ctx *cli.Context) {
	setup(ctx)

	isAnythingUpdated := false
	// Load local version info.
	localVerInfo := loadLocalVerInfo()

	// Get remote version info.
	var remoteVerInfo version
	if err := com.HttpGetJSON(http.DefaultClient, "http://gopm.io/VERSION.json", &remoteVerInfo); err != nil {
		log.Error("Update", "Fail to fetch VERSION.json")
		log.Fatal("", err.Error())
	}

	// Package name list.
	if remoteVerInfo.PackageNameList > localVerInfo.PackageNameList {
		log.Log("Updating pkgname.list...%v > %v",
			localVerInfo.PackageNameList, remoteVerInfo.PackageNameList)
		data, err := com.HttpGetBytes(http.DefaultClient, "https://raw2.github.com/gpmgo/docs/master/pkgname.list", nil)
		if err != nil {
			log.Error("Update", "Fail to update pkgname.list")
			log.Fatal("", err.Error())
		}

		if err = com.WriteFile(path.Join(doc.HomeDir, doc.PKG_NAME_LIST_PATH), data); err != nil {
			log.Error("Update", "Fail to save pkgname.list")
			log.Fatal("", err.Error())
		}
		log.Log("Update pkgname.list to %v succeed!", remoteVerInfo.PackageNameList)
		isAnythingUpdated = true
	}

	// Gopm.
	if remoteVerInfo.Gopm > localVerInfo.Gopm {
		log.Log("Updating gopm...%v > %v",
			localVerInfo.Gopm, remoteVerInfo.Gopm)
		installRepoPath = doc.HomeDir + "/repos"

		tmpDirPath := filepath.Join(doc.HomeDir, "temp")
		tmpBinPath := filepath.Join(tmpDirPath, "gopm")
		if runtime.GOOS == "windows" {
			tmpBinPath += ".exe"
		}

		os.MkdirAll(tmpDirPath, os.ModePerm)
		os.Remove(tmpBinPath)

		// Fetch code.
		args := []string{"bin", "-u", "-d"}
		if ctx.Bool("verbose") {
			args = append(args, "-v")
		}
		args = append(args, []string{"github.com/gpmgo/gopm", tmpDirPath}...)
		stdout, stderr, err := com.ExecCmd("gopm", args...)
		if err != nil {
			log.Error("Update", "Fail to execute 'gopm bin -u -d github.com/gpmgo/gopm "+tmpDirPath+"'")
			log.Fatal("", err.Error())
		}
		if len(stderr) > 0 {
			fmt.Print(stderr)
		}
		if len(stdout) > 0 {
			fmt.Print(stdout)
		}

		// Check if previous steps were successful.
		if !com.IsExist(tmpBinPath) {
			log.Error("Update", "Fail to continue command")
			log.Fatal("", "Previous steps weren't successful, no binary produced")
		}

		movePath := exePath()
		log.Log("New binary will be replaced for %s", movePath)
		// Move binary to given directory.
		if runtime.GOOS != "windows" {
			err := os.Rename(tmpBinPath, movePath)
			if err != nil {
				log.Error("Update", "Fail to move binary")
				log.Fatal("", err.Error())
			}
			os.Chmod(movePath+"/"+path.Base(tmpBinPath), os.ModePerm)
		} else {
			batPath := filepath.Join(tmpDirPath, "update.bat")
			f, err := os.Create(batPath)
			if err != nil {
				log.Error("Update", "Fail to generate bat file")
				log.Fatal("", err.Error())
			}
			f.WriteString("@echo off\r\n")
			f.WriteString(fmt.Sprintf("ping -n 1 127.0.0.1>nul\r\ncopy \"%v\" \"%v\" >nul\r\ndel \"%v\" >nul\r\n\r\n",
				tmpBinPath, movePath, tmpBinPath))
			//f.WriteString(fmt.Sprintf("del \"%v\"\r\n", batPath))
			f.Close()

			attr := &os.ProcAttr{
				Dir:   workDir,
				Env:   os.Environ(),
				Files: []*os.File{os.Stdin, os.Stdout, os.Stderr},
			}

			_, err = os.StartProcess(batPath, []string{batPath}, attr)
			if err != nil {
				log.Error("Update", "Fail to start bat process")
				log.Fatal("", err.Error())
			}
		}

		log.Success("SUCC", "Update", "Command execute successfully!")
		isAnythingUpdated = true
	}

	// Save JSON.
	f, err := os.Create(path.Join(doc.HomeDir, doc.VER_PATH))
	if err != nil {
		log.Error("Update", "Fail to create VERSION.json")
		log.Fatal("", err.Error())
	}
	if err := json.NewEncoder(f).Encode(&remoteVerInfo); err != nil {
		log.Error("Update", "Fail to encode VERSION.json")
		log.Fatal("", err.Error())
	}

	if !isAnythingUpdated {
		log.Log("Nothing need to be updated")
	}
	log.Log("Exit old gopm")
}
Example #13
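// getGithubDoc fetches refs, trees, and repository metadata from the GitHub API and builds the package documentation.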
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {
	match["cred"] = githubCred

	// Get master commit.
	var refs []*struct {
		Ref    string
		Url    string
		Object struct {
			Sha  string
			Type string
			Url  string
		}
	}

	err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		if strings.HasPrefix(err.Error(), "Resource not found") {
			return nil, com.NotFoundError{"doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error()}
		}
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	var commit string
	// Get all tags.
	tags := make([]string, 0, 5)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/master"):
			commit = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags = append(tags, ref.Ref[len("refs/tags/"):])
		}
	}

	if len(tags) > 0 {
		tags = append([]string{"master"}, tags...)
	}

	if len(tag) == 0 {
		// Check revision tag.
		if commit == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, com.Expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, errors.New("Github import path has incorrect case")
	}

	// Get source file data and subdirectories.
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	dirs := make([]string, 0, 5)
	files := make([]com.RawFile, 0, 5)
	for _, node := range tree.Tree {
		// Skip directories and files in the wrong directories; they are handled later.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); utils.IsDocFile(f) &&
			utils.FilterDirName(d) {
			// Check if it's a Go file.
			if !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				// Yes.
				if !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &hv.Source{
					SrcName:   f,
					BrowseUrl: com.Expand("github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
					RawSrcUrl: com.Expand("https://raw.github.com/{owner}/{repo}/{tag}/{0}", match, node.Path) + "?" + githubCred,
				})
			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, com.NotFoundError{"Cannot find Go files, it's not a Go project"}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs"}
	}

	// Fetch file from VCS.
	if err := com.FetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	// Get additional repository information: stars, forks, open issues.
	var note struct {
		Homepage string
		Fork     bool
		Parent   struct {
			Html string `json:"html_url"`
		}
		Issues int `json:"open_issues_count"`
		Stars  int `json:"watchers_count"`
		Forks  int `json:"forks_count"`
	}

	err = com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}?{cred}", match), &note)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get note: " + err.Error())
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#L%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("github.com/{owner}/{repo}/blob/{tag}", match),
				ViewDirPath: com.Expand("github.com/{owner}/{repo}/blob/{tag}{dir}", match),
				Tags:        strings.Join(tags, "|||"),
				Ptag:        commit,
				Vcs:         "GitHub",
				Issues:      note.Issues,
				Stars:       note.Stars,
				Forks:       note.Forks,
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	if len(note.Homepage) > 0 {
		w.Pdoc.Homepage = note.Homepage
	}
	if note.Fork {
		w.Pdoc.ForkUrl = note.Parent.Html
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
Example #14
// getGithubDoc downloads tarball from github.com.
func getGithubDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, ctx *cli.Context) ([]string, error) {
	match["cred"] = GetGithubCredentials()

	// Check download type.
	switch nod.Type {
	case BRANCH:
		if len(nod.Value) == 0 {
			match["sha"] = MASTER

			// Only get and check revision with the latest version.
			var refs []*struct {
				Ref    string
				Url    string
				Object struct {
					Sha  string
					Type string
					Url  string
				}
			}

			err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
			if err != nil {
				if strings.Contains(err.Error(), "403") {
					break
				}
				log.Warn("GET", "Fail to get revision")
				log.Warn("", err.Error())
				break
			}

			var etag string
		COMMIT_LOOP:
			for _, ref := range refs {
				switch {
				case strings.HasPrefix(ref.Ref, "refs/heads/master"):
					etag = ref.Object.Sha
					break COMMIT_LOOP
				}
			}
			if etag == nod.Revision {
				log.Log("GET Package hasn't changed: %s", nod.ImportPath)
				return nil, nil
			}
			nod.Revision = etag

		} else {
			match["sha"] = nod.Value
		}
	case TAG, COMMIT:
		match["sha"] = nod.Value
	default:
		return nil, errors.New("Unknown node type: " + nod.Type)
	}

	// We use .zip here.
	// zip: https://github.com/{owner}/{repo}/archive/{sha}.zip
	// tarball: https://github.com/{owner}/{repo}/tarball/{sha}

	// Download archive.
	p, err := com.HttpGetBytes(client, com.Expand("https://github.com/{owner}/{repo}/archive/{sha}.zip", match), nil)
	if err != nil {
		return nil, errors.New("Fail to donwload Github repo -> " + err.Error())
	}

	shaName := com.Expand("{repo}-{sha}", match)
	if nod.Type == "tag" {
		shaName = strings.Replace(shaName, "-v", "-", 1)
	}

	var installPath string
	if nod.ImportPath == nod.DownloadURL {
		suf := "." + nod.Value
		if len(suf) == 1 {
			suf = ""
		}
		projectPath := com.Expand("github.com/{owner}/{repo}", match)
		installPath = installRepoPath + "/" + projectPath + suf
		nod.ImportPath = projectPath
	} else {
		installPath = installRepoPath + "/" + nod.ImportPath
	}

	// Remove old files.
	os.RemoveAll(installPath + "/")
	os.MkdirAll(installPath+"/", os.ModePerm)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New(nod.ImportPath + " -> new zip: " + err.Error())
	}

	dirs := make([]string, 0, 5)
	// Need to add the root path because we cannot get it from the archive.
	dirs = append(dirs, installPath+"/")
	for _, f := range r.File {
		absPath := strings.Replace(f.Name, shaName, installPath, 1)
		// Create the directory before creating the file.
		os.MkdirAll(path.Dir(absPath)+"/", os.ModePerm)

	compareDir:
		switch {
		case strings.HasSuffix(absPath, "/"): // Directory.
			// Check if current directory is example.
			if !(!ctx.Bool("example") && strings.Contains(absPath, "example")) {
				for _, d := range dirs {
					if d == absPath {
						break compareDir
					}
				}
				dirs = append(dirs, absPath)
			}
		default:
			// Get file from archive.
			r, err := f.Open()
			if err != nil {
				return nil, err
			}

			fbytes := make([]byte, f.FileInfo().Size())
			_, err = io.ReadFull(r, fbytes)
			if err != nil {
				return nil, err
			}

			if err = com.WriteFile(absPath, fbytes); err != nil {
				return nil, err
			}

			// Set modify time.
			os.Chtimes(absPath, f.ModTime(), f.ModTime())
		}
	}

	var imports []string

	// Check imports if dependencies need to be resolved.
	if nod.IsGetDeps {
		for _, d := range dirs {
			importPkgs, err := CheckImports(d, match["importPath"], nod)
			if err != nil {
				return nil, err
			}
			imports = append(imports, importPkgs...)
		}
	}
	return imports, err
}
Example #15
func ExampleHttpGetJSON() (interface{}, error) {
	// com.HttpGetJSON decodes the JSON response body into the value pointed to
	// by the last argument and returns an error; it does not return the JSON itself.
	var data interface{}
	err := com.HttpGetJSON(&http.Client{}, "http://gowalker.org", &data)
	return data, err
}
Example #16
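// getGolangDoc builds documentation for a standard library package from the github.com/golang/go repository.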
func getGolangDoc(importPath, etag string) (*Package, error) {
	match := map[string]string{
		"cred": setting.GitHubCredentials,
	}

	// Check revision.
	commit, err := getGithubRevision("github.com/golang/go")
	if err != nil {
		return nil, fmt.Errorf("get revision: %v", err)
	}
	if commit == etag {
		return nil, ErrPackageNotModified
	}

	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	if err := com.HttpGetJSON(Client,
		com.Expand("https://api.github.com/repos/golang/go/git/trees/master?recursive=1&{cred}", match), &tree); err != nil {
		return nil, fmt.Errorf("get tree: %v", err)
	}

	dirPrefix := "src/" + importPath + "/"
	dirLevel := len(strings.Split(dirPrefix, "/"))
	dirLength := len(dirPrefix)
	dirMap := make(map[string]bool)
	files := make([]com.RawFile, 0, 10)

	for _, node := range tree.Tree {
		// Skip directories and files in irrelevant directories.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); IsDocFile(f) {
			// Check if file is in the directory that is corresponding to import path.
			if d == dirPrefix {
				files = append(files, &Source{
					SrcName:   f,
					BrowseUrl: com.Expand("github.com/golang/go/blob/master/{0}", nil, node.Path),
					RawSrcUrl: com.Expand("https://raw.github.com/golang/go/master/{0}?{1}", nil, node.Path, setting.GitHubCredentials),
				})
				continue
			}

			// Otherwise, check if it's a direct sub-directory of import path.
			if len(strings.Split(d, "/"))-dirLevel == 1 {
				dirMap[d[dirLength:len(d)-1]] = true
				continue
			}
		}
	}

	dirs := base.MapToSortedStrings(dirMap)

	if len(files) == 0 && len(dirs) == 0 {
		return nil, ErrPackageNoGoFile
	} else if err := com.FetchFiles(Client, files, githubRawHeader); err != nil {
		return nil, fmt.Errorf("fetch files: %v", err)
	}

	// Start generating data.
	w := &Walker{
		LineFmt: "#L%d",
		Pdoc: &Package{
			PkgInfo: &models.PkgInfo{
				ImportPath:  importPath,
				ProjectPath: "github.com/golang/go",
				ViewDirPath: "github.com/golang/go/tree/master/src/" + importPath,
				Etag:        commit,
				IsGoRepo:    true,
				Subdirs:     strings.Join(dirs, "|"),
			},
		},
	}

	srcs := make([]*Source, 0, len(files))
	srcMap := make(map[string]*Source)
	for _, f := range files {
		s := f.(*Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&WalkRes{
		WalkDepth: WD_All,
		WalkType:  WT_Memory,
		WalkMode:  WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, fmt.Errorf("walk package: %v", err)
	}

	return pdoc, nil
}
Example #17
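// HttpGetJSON appends values as extra query parameters (the base url is expected to
// already contain a query string, hence the "&") and decodes the JSON response into data.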
func HttpGetJSON(url string, values url.Values, data interface{}) error {
	if values != nil {
		url = url + "&" + values.Encode()
	}
	return com.HttpGetJSON(httpClient, url, data)
}
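A minimal usage sketch for the wrapper above, assuming a hypothetical endpoint and response type (releaseInfo, fetchRelease, and the URL are illustrative, not part of the original code):

// releaseInfo is a hypothetical shape for the JSON returned by the endpoint below.
type releaseInfo struct {
	Version string `json:"version"`
}

func fetchRelease() (*releaseInfo, error) {
	v := url.Values{}
	v.Set("channel", "stable")
	info := new(releaseInfo)
	// The base URL already carries a query string, matching the "&" concatenation in the wrapper.
	return info, HttpGetJSON("https://example.com/api/release?format=json", v, info)
}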