Example no. 1
func getStandardDoc(client *http.Client, importPath string, savedEtag string) (pdoc *Package, err error) {
	p, err := httpGetBytes(client, "http://go.googlecode.com/hg-history/release/src/pkg/"+importPath+"/", nil)
	if err != nil {
		return nil, err
	}

	// Check revision tag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("doc.getStandardDoc(): Could not find revision for " + importPath)
	} else {
		etag = string(m[1])
		if etag == savedEtag {
			return nil, errNotModified
		}
	}

	// Get source file data.
	files := make([]*source, 0, 5)
	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
		fname := strings.Split(string(m[1]), "?")[0]
		if utils.IsDocFile(fname) {
			files = append(files, &source{
				name:      fname,
				browseURL: "http://code.google.com/p/go/source/browse/src/pkg/" + importPath + "/" + fname + "?name=release",
				rawURL:    "http://go.googlecode.com/hg-history/release/src/pkg/" + importPath + "/" + fname,
			})
		}
	}

	if len(files) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files."}
	}

	dirs := make([]string, 0, 5)
	// Get subdirectories.
	for _, m := range googleDirRe.FindAllSubmatch(p, -1) {
		dirName := strings.Split(string(m[1]), "?")[0]
		dirs = append(dirs, strings.Replace(dirName, "/", "", -1))
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#%d",
		pdoc: &Package{
			ImportPath:  importPath,
			ProjectName: "Go",
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
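These examples rely on helper types defined elsewhere in the package and not shown here. As orientation only, a minimal sketch of the source struct and NotFoundError type, inferred from the fields and literals used in the examples (names, layout and comments are assumptions, not the real definitions):

// Assumed sketch of a fetched source file, matching how the examples use it.
type source struct {
	name      string // base file name
	browseURL string // human-facing URL for viewing the file
	rawURL    string // URL used to fetch the raw contents
	data      []byte // contents, presumably filled in by fetchFiles
}

// Assumed sketch of the not-found error used by the examples.
type NotFoundError struct {
	Message string
}

func (e NotFoundError) Error() string {
	return e.Message
}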
Example no. 2
// Only supports .zip archives.
func getRepoByArchive(match map[string]string, downloadPath string) (bool, string, []com.RawFile, []string, error) {
	stdout, _, err := com.ExecCmd("curl", downloadPath)
	if err != nil {
		return false, "", nil, nil, err
	}
	p := []byte(stdout)

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return false, "", nil, nil, errors.New(downloadPath + " -> new zip: " + err.Error())
	}

	if len(r.File) == 0 {
		return false, "", nil, nil, nil
	}

	nameLen := strings.Index(r.File[0].Name, "/")
	dirPrefix := match["dir"]
	if len(dirPrefix) != 0 {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)
	isGoPro := false

	// for k, v := range match {
	// 	println(k, v)
	// }
	comment := r.Comment

	files := make([]com.RawFile, 0, 5)
	dirs := make([]string, 0, 5)
	for _, f := range r.File {
		fileName := f.Name[nameLen+1:]
		// Skip directories and files in wrong directories, get them later.
		if strings.HasSuffix(fileName, "/") || !strings.HasPrefix(fileName, dirPrefix) {
			continue
		}
		//fmt.Println(fileName)

		// Get files and check if directories have acceptable files.
		if d, fn := path.Split(fileName); utils.IsDocFile(fn) &&
			utils.FilterDirName(d) {
			// Check if it's a Go file.
			if !isGoPro && strings.HasSuffix(fn, ".go") {
				isGoPro = true
			}

			// Check if the file is in the directory corresponding to the import path.
			if d == dirPrefix {
				// Yes.
				if !isGoPro && strings.HasSuffix(fn, ".go") {
					isGoPro = true
				}
				// Get file from archive.
				rc, err := f.Open()
				if err != nil {
					return isGoPro, comment, files, dirs,
						errors.New(downloadPath + " -> open file: " + err.Error())
				}

				p := make([]byte, f.FileInfo().Size())
				_, err = io.ReadFull(rc, p)
				rc.Close()
				if err != nil {
					return isGoPro, comment, files, dirs,
						errors.New(downloadPath + " -> read file: " + err.Error())
				}
				//fmt.Println(com.Expand(match["browserUrlTpl"], match, fn))
				files = append(files, &hv.Source{
					SrcName:   fn,
					BrowseUrl: com.Expand(match["browserUrlTpl"], match, fn),
					RawSrcUrl: com.Expand(match["rawSrcUrlTpl"], match, fileName[preLen:]),
					SrcData:   p,
				})

			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}
	return isGoPro, comment, files, dirs, nil
}
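Several examples call a checkDir helper before appending a subdirectory; its definition is not included in this listing. Judging from the call sites it simply reports whether the directory is already recorded, so a sketch under that assumption:

// checkDir reports whether dir is already present in dirs.
// Assumed implementation; the original helper is not shown in these examples.
func checkDir(dir string, dirs []string) bool {
	for _, d := range dirs {
		if d == dir {
			return true
		}
	}
	return false
}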
Example no. 3
func getVCSDoc(client *http.Client, match map[string]string, etagSaved string) (*hv.Package, error) {
	cmd := vcsCmds[match["vcs"]]
	if cmd == nil {
		return nil, com.NotFoundError{com.Expand("VCS not supported: {vcs}", match)}
	}

	scheme := match["scheme"]
	if scheme == "" {
		i := strings.Index(etagSaved, "-")
		if i > 0 {
			scheme = etagSaved[:i]
		}
	}

	schemes := cmd.schemes
	if scheme != "" {
		for i := range cmd.schemes {
			if cmd.schemes[i] == scheme {
				schemes = cmd.schemes[i : i+1]
				break
			}
		}
	}

	// Download and checkout.

	tag, _, err := cmd.download(schemes, match["repo"], etagSaved)
	if err != nil {
		return nil, err
	}

	// Find source location.

	urlTemplate, urlMatch, lineFmt := lookupURLTemplate(match["repo"], match["dir"], tag)

	// Slurp source files.

	d := path.Join(repoRoot, com.Expand("{repo}.{vcs}", match), match["dir"])
	f, err := os.Open(d)
	if err != nil {
		if os.IsNotExist(err) {
			err = com.NotFoundError{err.Error()}
		}
		return nil, err
	}
	fis, err := f.Readdir(-1)
	if err != nil {
		return nil, err
	}

	// Get source file data.
	var files []com.RawFile
	for _, fi := range fis {
		if fi.IsDir() || !utils.IsDocFile(fi.Name()) {
			continue
		}
		b, err := ioutil.ReadFile(path.Join(d, fi.Name()))
		if err != nil {
			return nil, err
		}
		files = append(files, &hv.Source{
			SrcName:   fi.Name(),
			BrowseUrl: com.Expand(urlTemplate, urlMatch, fi.Name()),
			SrcData:   b,
		})
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: lineFmt,
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
			},
		},
	}

	srcs := make([]*hv.Source, 0, len(files))
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)
	}

	return w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
}
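getVCSDoc dispatches through a vcsCmds table that is only touched via cmd.schemes and cmd.download. A minimal sketch of the shape implied by those calls; the type and field names here are assumptions:

// Assumed shape of a vcsCmds entry, inferred from how getVCSDoc uses it.
type vcsCmd struct {
	schemes  []string // URL schemes to try, e.g. "https" then "http"
	download func(schemes []string, repo, savedEtag string) (tag, etag string, err error)
}

// vcsCmds maps a VCS name ("git", "hg", ...) to its command helpers.
var vcsCmds = map[string]*vcsCmd{}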
Example no. 4
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	match["cred"] = githubCred

	// Get master commit.
	var refs []*struct {
		Ref    string
		Url    string
		Object struct {
			Sha  string
			Type string
			Url  string
		}
	}

	err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	var commit string
	// Get all tags.
	tags := make([]string, 0, 5)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/master"):
			commit = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags = append(tags, ref.Ref[len("refs/tags/"):])
		}
	}

	if len(tags) > 5 {
		tags = tags[len(tags)-5:]
	}
	tags = append([]string{"master"}, tags...)

	if len(tag) == 0 {
		// Check revision tag.
		if commit == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Get files and subdirectories.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get tress: " + err.Error())
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, NotFoundError{"Github import path has incorrect case."}
	}

	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	// Get source file data and subdirectories.
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, node := range tree.Tree {
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			if len(dirPrefix) > 0 && strings.HasPrefix(node.Path, dirPrefix) {
				p := node.Path[preLen:]
				dirs = append(dirs, p)
			} else if len(dirPrefix) == 0 && strings.Index(node.Path, "/") == -1 &&
				utils.FilterFileName(node.Path) {
				dirs = append(dirs, node.Path)
			}
			continue
		}
		if d, f := path.Split(node.Path); d == dirPrefix && utils.IsDocFile(f) {
			files = append(files, &source{
				name:      f,
				browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
				rawURL:    node.Url + "?" + githubCred,
			})
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	/*browseURL := expand("https://github.com/{owner}/{repo}", match)
	if match["dir"] != "" {
		browseURL = expand("https://github.com/{owner}/{repo}/tree/{tag}{dir}", match)
	}*/

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
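The GitHub variants expect a match map that the surrounding service extracts from the import path. A hedged usage sketch showing only the keys the code above reads; the values and the wrapper function are illustrative, not part of the original:

// Hypothetical call site; in the real service the match map comes from an
// import-path matcher rather than a hand-written literal.
func exampleGithubLookup(client *http.Client) (*Package, error) {
	match := map[string]string{
		"importPath": "github.com/owner/repo/subdir",
		"owner":      "owner",
		"repo":       "repo",
		"dir":        "/subdir", // leading slash; the code strips it via dirPrefix[1:]
	}
	// Empty tag means the default branch; empty savedEtag forces a full fetch.
	return getGithubDoc(client, match, "", "")
}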
Example no. 5
func getBitbucketDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {

	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> " + err.Error())
		}
		match["vcs"] = repo.Scm
	}

	// Get master commit.
	var branches map[string]struct {
		Node string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/branches", match), &branches); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get branches: " + err.Error())
	}
	match["commit"] = branches["default"].Node

	// Get all tags.
	tags := make([]string, 0, 5)
	var nodes map[string]struct {
		Node string
	}
	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/tags", match), &nodes); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get nodes: " + err.Error())
	}
	for k := range nodes {
		tags = append(tags, k)
	}
	if len(tags) > 0 {
		tags = append([]string{defaultTags[match["vcs"]]}, tags...)
	}

	var etag string
	if len(tag) == 0 {
		// Check revision tag.
		etag = match["commit"]
		if etag == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = defaultTags[match["vcs"]]
	} else {
		match["tag"] = tag
	}

	// Get files and directories.
	var node struct {
		Files []struct {
			Path string
		}
		Directories []string
	}

	if err := com.HttpGetJSON(client, com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &node); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Get source file data.
	files := make([]com.RawFile, 0, 5)
	for _, f := range node.Files {
		_, name := path.Split(f.Path)
		if utils.IsDocFile(name) {
			files = append(files, &hv.Source{
				SrcName:   name,
				BrowseUrl: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				RawSrcUrl: com.Expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, len(node.Directories))
	for _, d := range node.Directories {
		if utils.FilterDirName(d) {
			dirs = append(dirs, d)
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := com.FetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#cl-%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}/", match),
				ViewDirPath: com.Expand("bitbucket.org/{owner}/{repo}/src/{tag}{dir}/", match),
				Tags:        strings.Join(tags, "|||"),
				Ptag:        etag,
				Vcs:         "BitBucket",
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
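The Bitbucket and Google Code examples look up defaultTags[match["vcs"]] to pick the default branch name. The map itself is not shown; a plausible sketch with assumed values:

// Assumed contents: the default branch/tip name per VCS.
var defaultTags = map[string]string{
	"git": "master",
	"hg":  "default",
}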
Example no. 6
func getStandardDoc(client *http.Client, importPath, tag, ptag string) (*hv.Package, error) {
	// hg-history: http://go.googlecode.com/hg-history/release/src/pkg/"+importPath+"/"
	stdout, _, err := com.ExecCmd("curl", "http://go.googlecode.com/hg/src/pkg/"+importPath+"/?r="+tag)
	if err != nil {
		return nil, errors.New("doc.getStandardDoc(" + importPath + ") -> " + err.Error())
	}
	p := []byte(stdout)

	// Check revision tag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("doc.getStandardDoc(" + importPath + ") -> Could not find revision")
	} else {
		etag = string(m[1])
		if etag == ptag {
			return nil, errNotModified
		}
	}

	// Get source file data.
	ms := googleFileRe.FindAllSubmatch(p, -1)
	files := make([]com.RawFile, 0, len(ms))
	for _, m := range ms {
		fname := strings.Split(string(m[1]), "?")[0]
		if utils.IsDocFile(fname) {
			files = append(files, &hv.Source{
				SrcName:   fname,
				BrowseUrl: "code.google.com/p/go/source/browse/src/pkg/" + importPath + "/" + fname + "?r=" + tag,
				RawSrcUrl: "http://go.googlecode.com/hg/src/pkg/" + importPath + "/" + fname + "?r=" + tag,
			})
		}
	}

	// Get subdirectories.
	ms = googleDirRe.FindAllSubmatch(p, -1)
	dirs := make([]string, 0, len(ms))
	for _, m := range ms {
		dirName := strings.Split(string(m[1]), "?")[0]
		// Make sure we get directories.
		if strings.HasSuffix(dirName, "/") &&
			utils.FilterDirName(dirName) {
			dirs = append(dirs, strings.Replace(dirName, "/", "", -1))
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchGoogleFiles(client, files); err != nil {
		return nil, err
	}

	// Get all tags.
	tags := getGoogleTags("code.google.com/p/go/"+importPath, "default", true)

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  importPath,
				ProjectName: "Go",
				ProjectPath: "code.google.com/p/go/source/browse/src/pkg/?r=" + tag,
				ViewDirPath: "code.google.com/p/go/source/browse/src/pkg/" + importPath + "/?r=" + tag,
				IsGoRepo:    true,
				Tags:        strings.Join(tags, "|||"),
				Ptag:        etag,
				Vcs:         "Google Code",
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if len(tag) == 0 && !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getStandardDoc(" + importPath + ") -> Fail to build: " + err.Error())
	}

	return pdoc, generateHv(importPath, srcMap)
}
Example no. 7
func getGithubDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {
	SetGithubCredentials("1862bcb265171f37f36c", "308d71ab53ccd858416cfceaed52d5d5b7d53c5f")
	match["cred"] = githubCred

	var refs []*struct {
		Object struct {
			Type string
			Sha  string
			Url  string
		}
		Ref string
		Url string
	}

	err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		return nil, err
	}

	tags := make(map[string]string)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/"):
			tags[ref.Ref[len("refs/heads/"):]] = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags[ref.Ref[len("refs/tags/"):]] = ref.Object.Sha
		}
	}

	// Check revision tag.
	var commit string
	match["tag"], commit, err = bestTag(tags, "master")
	if err != nil {
		return nil, err
	}

	if commit == savedEtag {
		return nil, errNotModified
	}

	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, err
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, NotFoundError{"Github import path has incorrect case."}
	}

	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	// Get source file data.
	dirs := make([]string, 0, 3)
	files := make([]*source, 0, 5)
	for _, node := range tree.Tree {
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			if len(dirPrefix) > 0 && strings.HasPrefix(node.Path, dirPrefix) {
				p := node.Path[preLen:]
				dirs = append(dirs, p)
			} else if len(dirPrefix) == 0 && strings.Index(node.Path, "/") == -1 {
				dirs = append(dirs, node.Path)
			}
			continue
		}
		if d, f := path.Split(node.Path); d == dirPrefix && utils.IsDocFile(f) {
			files = append(files, &source{
				name:      f,
				browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
				rawURL:    node.Url + "?" + githubCred,
			})
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	/*browseURL := expand("https://github.com/{owner}/{repo}", match)
	if match["dir"] != "" {
		browseURL = expand("https://github.com/{owner}/{repo}/tree/{tag}{dir}", match)
	}*/

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Etag:        commit,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 8
func getBitbucketDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {

	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> " + err.Error())
		}
		match["vcs"] = repo.Scm
	}

	// Get master commit.
	var branches map[string]struct {
		Node string
	}
	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/branches", match), &branches); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get branches: " + err.Error())
	}
	match["commit"] = branches["default"].Node

	// Get all tags.
	tags := make([]string, 0, 5)
	var nodes map[string]struct {
		Node string
	}
	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/tags", match), &nodes); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get nodes: " + err.Error())
	}
	for k := range nodes {
		tags = append(tags, k)
	}
	if len(tags) > 5 {
		tags = tags[len(tags)-5:]
	}
	tags = append([]string{defaultTags[match["vcs"]]}, tags...)

	var etag string
	if len(tag) == 0 {
		// Check revision tag.
		etag = expand("{vcs}-{commit}", match)
		if etag == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = defaultTags[match["vcs"]]
	} else {
		match["tag"] = tag
	}

	// Get files and directories.
	var node struct {
		Files []struct {
			Path string
		}
		Directories []string
	}

	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &node); err != nil {
		return nil, errors.New("doc.getBitbucketDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Get source file data.
	files := make([]*source, 0, 5)
	for _, f := range node.Files {
		_, name := path.Split(f.Path)
		if utils.IsDocFile(name) {
			files = append(files, &source{
				name:      name,
				browseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				rawURL:    expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	// Get subdirectories.
	dirs := make([]string, 0, len(node.Directories))
	for _, d := range node.Directories {
		if utils.FilterFileName(d) {
			dirs = append(dirs, d)
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#cl-%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 9
func getLaunchpadDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {

	if match["project"] != "" && match["series"] != "" {
		rc, err := com.HttpGet(client, com.Expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound(err):
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = com.Expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := com.HttpGetBytes(client, com.Expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
	if err != nil {
		return nil, err
	}

	// Get source file data.
	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var hash []byte
	dirPrefix := com.Expand("+branch/{repo}{dir}/", match)
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 3)
	files := make([]com.RawFile, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		// Skip directories and files in wrong directories, get them later.
		if strings.HasSuffix(h.Name, "/") || !strings.HasPrefix(h.Name, dirPrefix) {
			continue
		}

		d, f := path.Split(h.Name)
		if utils.IsDocFile(f) && utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Get file from archive.
			b := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, b); err != nil {
				return nil, err
			}

			m := md5.New()
			m.Write(b)
			hash = m.Sum(hash)

			// Check if the file is in the directory corresponding to the import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &hv.Source{
					SrcName:   f,
					BrowseUrl: com.Expand("bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
					SrcData:   b})
			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, com.NotFoundError{"Cannot find Go files, it's not a Go project."}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	sort.Sort(byHash(hash))
	m := md5.New()
	m.Write(hash)
	hash = m.Sum(hash[:0])
	etag := hex.EncodeToString(hash)
	if etag == savedEtag {
		return nil, errNotModified
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#L%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("bazaar.launchpad.net/+branch/{repo}/files", match),
				ViewDirPath: com.Expand("bazaar.launchpad.net/+branch/{repo}/files/head:{dir}/", match),
				Ptag:        etag,
				Vcs:         "Launchpad",
			},
			PkgDecl: &hv.PkgDecl{
				Dirs: dirs,
			},
		},
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getLaunchpadDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
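The Launchpad examples accumulate an MD5 sum per file, sort the concatenated bytes with byHash, and hash them once more to derive the etag, so the result does not depend on tarball ordering. byHash is not defined in this listing; a sketch assuming it is simply a sort.Interface over the byte slice:

// byHash sorts the concatenated per-file MD5 sums byte by byte. Assumed
// implementation; the original type is not shown in these examples.
type byHash []byte

func (p byHash) Len() int           { return len(p) }
func (p byHash) Less(i, j int) bool { return p[i] < p[j] }
func (p byHash) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }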
Example no. 10
func getGoogleDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {
	setupGoogleMatch(match)
	if m := googleEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else if err := getGoogleVCS(client, match); err != nil {
		return nil, err
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := httpGetBytes(client, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match), nil)
	if err != nil {
		return nil, err
	}

	// Check revision tag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("doc.getGoogleDoc(): Could not find revision for " + match["importPath"])
	} else {
		etag = expand("{vcs}-{0}", match, string(m[1]))
		if etag == savedEtag {
			return nil, errNotModified
		}
	}

	// Get source file data.
	var files []*source
	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
		fname := string(m[1])
		if utils.IsDocFile(fname) {
			files = append(files, &source{
				name:      fname,
				browseURL: expand("http://code.google.com/p/{repo}/source/browse{dir}/{0}{query}", match, fname),
				rawURL:    expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{0}", match, fname),
			})
		}
	}

	if len(files) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files."}
	}

	dirs := make([]string, 0, 5)
	// Get subdirectories.
	for _, m := range googleDirRe.FindAllSubmatch(p, -1) {
		dirName := strings.Split(string(m[1]), "?")[0]
		dirs = append(dirs, strings.Replace(dirName, "/", "", -1))
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: expand("{repo}{dot}{subrepo}", match),
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 11
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*hv.Package, error) {
	match["cred"] = githubCred

	// Get master commit.
	var refs []*struct {
		Ref    string
		Url    string
		Object struct {
			Sha  string
			Type string
			Url  string
		}
	}

	err := com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		if strings.HasPrefix(err.Error(), "Resource not found") {
			return nil, com.NotFoundError{"doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error()}
		}
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	var commit string
	// Get all tags.
	tags := make([]string, 0, 5)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/master"):
			commit = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags = append(tags, ref.Ref[len("refs/tags/"):])
		}
	}

	if len(tags) > 0 {
		tags = append([]string{"master"}, tags...)
	}

	if len(tag) == 0 {
		// Check revision tag.
		if commit == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, com.Expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, errors.New("Github import path has incorrect case")
	}

	// Get source file data and subdirectories.
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	dirs := make([]string, 0, 5)
	files := make([]com.RawFile, 0, 5)
	for _, node := range tree.Tree {
		// Skip directories and files in wrong directories, get them later.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); utils.IsDocFile(f) &&
			utils.FilterDirName(d) {
			// Check if it's a Go file.
			if !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Check if the file is in the directory corresponding to the import path.
			if d == dirPrefix {
				// Yes.
				if !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &hv.Source{
					SrcName:   f,
					BrowseUrl: com.Expand("github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
					RawSrcUrl: com.Expand("https://raw.github.com/{owner}/{repo}/{tag}/{0}", match, node.Path) + "?" + githubCred,
				})
			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, com.NotFoundError{"Cannot find Go files, it's not a Go project"}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs"}
	}

	// Fetch file from VCS.
	if err := com.FetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	// Get additional information: forks, watchers.
	var note struct {
		Homepage string
		Fork     bool
		Parent   struct {
			Html string `json:"html_url"`
		}
		Issues int `json:"open_issues_count"`
		Stars  int `json:"watchers_count"`
		Forks  int `json:"forks_count"`
	}

	err = com.HttpGetJSON(client, com.Expand("https://api.github.com/repos/{owner}/{repo}?{cred}", match), &note)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get note: " + err.Error())
	}

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#L%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath:  match["importPath"],
				ProjectName: match["repo"],
				ProjectPath: com.Expand("github.com/{owner}/{repo}/blob/{tag}", match),
				ViewDirPath: com.Expand("github.com/{owner}/{repo}/blob/{tag}{dir}", match),
				Tags:        strings.Join(tags, "|||"),
				Ptag:        commit,
				Vcs:         "GitHub",
				Issues:      note.Issues,
				Stars:       note.Stars,
				Forks:       note.Forks,
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	if len(note.Homepage) > 0 {
		w.Pdoc.Homepage = note.Homepage
	}
	if note.Fork {
		w.Pdoc.ForkUrl = note.Parent.Html
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if !strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && w.Pdoc.IsCmd {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
Example no. 12
func getGithubDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	match["cred"] = githubCred

	// Get master commit.
	var refs []*struct {
		Ref    string
		Url    string
		Object struct {
			Sha  string
			Type string
			Url  string
		}
	}

	err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		if strings.HasPrefix(err.Error(), "Resource not found") {
			return nil, NotFoundError{"doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error()}
		}
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	var commit string
	// Get all tags.
	tags := make([]string, 0, 5)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/master"):
			commit = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags = append(tags, ref.Ref[len("refs/tags/"):])
		}
	}

	if len(tags) > 5 {
		tags = tags[len(tags)-5:]
	}
	tags = append([]string{"master"}, tags...)

	if len(tag) == 0 {
		// Check revision tag.
		if commit == savedEtag {
			return nil, errNotModified
		}

		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Get files.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get trees: " + err.Error())
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, errors.New("Github import path has incorrect case.")
	}

	// Get source file data and subdirectories.
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, node := range tree.Tree {
		// Skip directories and files in wrong directories, get them later.
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, f := path.Split(node.Path); utils.IsDocFile(f) &&
			utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Check if the file is in the directory corresponding to the import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &source{
					name:      f,
					browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
					rawURL:    node.Url + "?" + githubCred,
				})
			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, NotFoundError{"Cannot find Go files, it's not a Go project."}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	// Get additional information: forks, watchers.
	// var note struct {
	// 	Forks    int
	// 	Watchers int `json:"watchers_count"`
	// }

	// err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}?{cred}", match), &note)
	// if err != nil {
	// 	return nil, errors.New("doc.getGithubDoc(" + match["importPath"] + ") -> get note: " + err.Error())
	// }

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
			//Note: strconv.Itoa(note.Forks) + "|" +
			//	strconv.Itoa(note.Watchers) + "|",
		},
	}
	return w.build(files)
}
Example no. 13
func getOSCDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	if len(tag) == 0 {
		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	// Force to lower case.
	match["importPath"] = strings.ToLower(match["importPath"])

	match["projectRoot"] = utils.GetProjectPath(match["importPath"])
	// Download zip.
	p, err := httpGetBytes(client, expand("http://{projectRoot}/repository/archive?ref={tag}", match), nil)
	if err != nil {
		return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> create zip: " + err.Error())
	}

	commit := r.Comment
	// Get source file data and subdirectories.
	nameLen := len(match["repo"])
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/"
	}
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, f := range r.File {
		fileName := f.Name[nameLen+1:] // full path within the archive, not just the base name
		// Skip directories and files in wrong directories, get them later.
		if strings.HasSuffix(fileName, "/") || !strings.HasPrefix(fileName, dirPrefix) {
			continue
		}

		// Get files and check if directories have acceptable files.
		if d, fn := path.Split(fileName); utils.IsDocFile(fn) &&
			utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(fn, ".go") {
				isGoPro = true
			}

			// Check if the file is in the directory corresponding to the import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(fn, ".go") {
					isGoPro = true
				}
				// Get file from archive.
				rc, err := f.Open()
				if err != nil {
					return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> open file: " + err.Error())
				}

				p := make([]byte, f.FileInfo().Size())
				_, err = io.ReadFull(rc, p)
				rc.Close()
				if err != nil {
					return nil, errors.New("doc.getOSCDoc(" + match["importPath"] + ") -> read file: " + err.Error())
				}

				files = append(files, &source{
					name:      fn,
					browseURL: expand("http://git.oschina.net/{owner}/{repo}/blob/{tag}/{0}", match, fileName),
					rawURL:    expand("http://git.oschina.net/{owner}/{repo}/raw/{tag}/{0}", match, fileName[preLen:]),
					data:      p,
				})
			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, NotFoundError{"Cannot find Go files, it's not a Go project."}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Get all tags.
	tags := getOSCTags(client, match["importPath"])

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 14
func getBitbucketDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {

	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, err
		}
		match["vcs"] = repo.Scm
	}

	tags := make(map[string]string)
	for _, nodeType := range []string{"branches", "tags"} {
		var nodes map[string]struct {
			Node string
		}
		if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
			return nil, err
		}
		for t, n := range nodes {
			tags[t] = n.Node
		}
	}

	var err error
	match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
	if err != nil {
		return nil, err
	}

	// Check revision tag.
	etag := expand("{vcs}-{commit}", match)
	if etag == savedEtag {
		return nil, errNotModified
	}

	var node struct {
		Files []struct {
			Path string
		}
		Directories []string
	}

	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &node); err != nil {
		return nil, err
	}

	// Get source file data.
	files := make([]*source, 0, 5)
	for _, f := range node.Files {
		_, name := path.Split(f.Path)
		if utils.IsDocFile(name) {
			files = append(files, &source{
				name:      name,
				browseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				rawURL:    expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	if len(files) == 0 && len(node.Directories) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#cl-%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Etag:        etag,
			Dirs:        node.Directories,
		},
	}
	return w.build(files)
}
Example no. 15
func getCSDNDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {
	if len(tag) == 0 {
		match["tag"] = "master"
	} else {
		match["tag"] = tag
	}

	match["projectRoot"] = utils.GetProjectPath(match["importPath"])
	// Download zip.
	p, err := httpGetBytes(client, expand("https://{projectRoot}/repository/archive?ref={tag}", match), nil)
	if err != nil {
		return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> " + err.Error())
	}

	r, err := zip.NewReader(bytes.NewReader(p), int64(len(p)))
	if err != nil {
		return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> create zip: " + err.Error())
	}

	commit := r.Comment
	// Get source file data and subdirectories.
	nameLen := len(match["importPath"][13:])
	dirLen := nameLen + len(match["dir"])
	dirs := make([]string, 0, 5)
	files := make([]*source, 0, 5)
	for _, f := range r.File {
		fileName := f.Name // full path within the archive, not just the base name
		if len(fileName) < dirLen {
			continue
		}

		// File.
		if utils.IsDocFile(fileName[dirLen+1:]) && strings.LastIndex(fileName, "/") == dirLen {
			// Get file from archive.
			rc, err := f.Open()
			if err != nil {
				return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> open file: " + err.Error())
			}

			p := make([]byte, f.FileInfo().Size())
			_, err = io.ReadFull(rc, p)
			rc.Close()
			if err != nil {
				return nil, errors.New("doc.getCSDNDoc(" + match["importPath"] + ") -> read file: " + err.Error())
			}

			files = append(files, &source{
				name:      fileName[dirLen+1:],
				browseURL: expand("http://code.csdn.net/{owner}/{repo}/blob/{tag}/{0}", match, fileName[nameLen+1:]),
				rawURL:    expand("http://code.csdn.net/{owner}/{repo}/raw/{tag}/{0}", match, fileName[dirLen+1:]),
				data:      p,
			})
			continue
		}

		// Directory.
		if strings.HasSuffix(fileName, "/") && utils.FilterDirName(fileName[dirLen+1:]) {
			dirs = append(dirs, fileName[dirLen+1:])
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Get all tags.
	tags := getCSDNTags(client, match["importPath"])

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Tags:        tags,
			Tag:         tag,
			Etag:        commit,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 16
func getLaunchpadDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {

	if match["project"] != "" && match["series"] != "" {
		rc, err := httpGet(client, expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound(err):
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := httpGetBytes(client, expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
	if err != nil {
		return nil, err
	}

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var hash []byte
	dirPrefix := expand("+branch/{repo}{dir}/", match)

	// Get source file data.
	dirs := make([]string, 0, 3)
	files := make([]*source, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		d, f := path.Split(h.Name)
		if !utils.IsDocFile(f) {
			// Check directories.
			if len(f) == 0 && strings.HasPrefix(d, dirPrefix) && len(d) > len(dirPrefix) {
				sub := h.Name[:len(h.Name)-1]
				dirs = append(dirs, sub[strings.LastIndex(sub, "/")+1:])
			}
			continue
		}
		b := make([]byte, h.Size)
		if _, err := io.ReadFull(tr, b); err != nil {
			return nil, err
		}

		m := md5.New()
		m.Write(b)
		hash = m.Sum(hash)

		if !strings.HasPrefix(h.Name, dirPrefix) {
			continue
		}

		if d == dirPrefix {
			files = append(files, &source{
				name:      f,
				browseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
				data:      b})
		}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	sort.Sort(byHash(hash))
	m := md5.New()
	m.Write(hash)
	hash = m.Sum(hash[:0])
	etag := hex.EncodeToString(hash)
	if etag == savedEtag {
		return nil, errNotModified
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 17
func getGoogleDoc(client *http.Client, match map[string]string, tag, ptag string) (*hv.Package, error) {
	setupGoogleMatch(match)
	if m := googleEtagRe.FindStringSubmatch(ptag); m != nil {
		match["vcs"] = m[1]
	} else if err := getGoogleVCS(match); err != nil {
		return nil, err
	}

	match["tag"] = tag
	// Scrape the repo browser to find the project revision and individual Go files.
	stdout, _, err := com.ExecCmd("curl", com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/?r={tag}", match))
	if err != nil {
		return nil, errors.New("doc.getGoogleDoc(" + match["importPath"] + ") -> " + err.Error())
	}
	p := []byte(stdout)

	// Check revision tag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("doc.getGoogleDoc(" + match["importPath"] + ") -> Could not find revision")
	} else {
		etag = string(m[1])
		if etag == ptag {
			return nil, errNotModified
		}
	}

	match["browserUrlTpl"] = "code.google.com/p/{repo}/source/browse{dir}/{0}?repo={subrepo}&r={tag}"
	match["rawSrcUrlTpl"] = "http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{0}?r={tag}"
	var isGoPro bool
	var files []com.RawFile
	var dirs []string
	// Unrecorded (no previous tag) and SVN projects can be downloaded as an archive.
	if len(ptag) == 0 || match["vcs"] == "svn" {
		tmpTag := match["tag"]
		if len(tmpTag) == 0 {
			tmpTag = defaultTags[match["vcs"]]
		}

		isGoPro, _, files, dirs, err = getRepoByArchive(match,
			com.Expand("http://{subrepo}{dot}{repo}.googlecode.com/archive/{0}.zip", match, tmpTag))
		if err != nil {
			return nil, errors.New("doc.getGoogleDoc(" + match["importPath"] + ") -> Fail to download archive: " + err.Error())
		}
	} else {
		// Get source file data.
		ms := googleFileRe.FindAllSubmatch(p, -1)
		files = make([]com.RawFile, 0, len(ms))
		for _, m := range ms {
			fname := strings.Split(string(m[1]), "?")[0]
			if utils.IsDocFile(fname) {
				isGoPro = true
				files = append(files, &hv.Source{
					SrcName:   fname,
					BrowseUrl: com.Expand(match["browserUrlTpl"], match, fname),
					RawSrcUrl: com.Expand(match["rawSrcUrlTpl"], match, fname),
				})
			}
		}

		// Get subdirectories.
		ms = googleDirRe.FindAllSubmatch(p, -1)
		dirs = make([]string, 0, len(ms))
		for _, m := range ms {
			dirName := strings.Split(string(m[1]), "?")[0]
			// Make sure we get directories.
			if strings.HasSuffix(dirName, "/") &&
				utils.FilterDirName(dirName) {
				dirs = append(dirs, strings.Replace(dirName, "/", "", -1))
			}
		}
	}

	if !isGoPro {
		return nil, com.NotFoundError{"Cannot find Go files, it's not a Go project."}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, com.NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	// Fetch file from VCS.
	if err := fetchGoogleFiles(client, files); err != nil {
		return nil, err
	}

	// Get all tags.
	tags := getGoogleTags(match["importPath"], defaultTags[match["vcs"]], false)

	// Start generating data.
	w := &hv.Walker{
		LineFmt: "#%d",
		Pdoc: &hv.Package{
			PkgInfo: &hv.PkgInfo{
				ImportPath: match["importPath"],
				IsGoSubrepo: utils.IsGoSubrepoPath(strings.TrimPrefix(
					match["importPath"], "code.google.com/p/")),
				ProjectName: com.Expand("{repo}{dot}{subrepo}", match),
				ProjectPath: com.Expand("code.google.com/p/{repo}/source/browse/?repo={subrepo}&r={tag}", match),
				ViewDirPath: com.Expand("code.google.com/p/{repo}/source/browse{dir}?repo={subrepo}&r={tag}", match),
				Tags:        strings.Join(tags, "|||"),
				Ptag:        etag,
				Vcs:         "Google Code",
			},
			PkgDecl: &hv.PkgDecl{
				Tag:  tag,
				Dirs: dirs,
			},
		},
	}

	srcs := make([]*hv.Source, 0, len(files))
	srcMap := make(map[string]*hv.Source)
	for _, f := range files {
		s, _ := f.(*hv.Source)
		srcs = append(srcs, s)

		if len(tag) == 0 && (w.Pdoc.IsGoSubrepo || w.Pdoc.IsCmd) &&
			!strings.HasSuffix(f.Name(), "_test.go") {
			srcMap[f.Name()] = s
		}
	}

	pdoc, err := w.Build(&hv.WalkRes{
		WalkDepth: hv.WD_All,
		WalkType:  hv.WT_Memory,
		WalkMode:  hv.WM_All,
		Srcs:      srcs,
	})
	if err != nil {
		return nil, errors.New("doc.getGoogleDoc(" + match["importPath"] + ") -> Fail to build: " + err.Error())
	}

	if len(tag) == 0 && (w.Pdoc.IsGoSubrepo || w.Pdoc.IsCmd) {
		err = generateHv(match["importPath"], srcMap)
	}

	return pdoc, err
}
Example no. 18
func getLaunchpadDoc(client *http.Client, match map[string]string, tag, savedEtag string) (*Package, error) {

	if match["project"] != "" && match["series"] != "" {
		rc, err := httpGet(client, expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case isNotFound(err):
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := httpGetBytes(client, expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
	if err != nil {
		return nil, err
	}

	// Get source file data.
	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var hash []byte
	dirPrefix := expand("+branch/{repo}{dir}/", match)
	preLen := len(dirPrefix)

	isGoPro := false // Indicates whether it's a Go project.
	isRootPath := match["importPath"] == utils.GetProjectPath(match["importPath"])
	dirs := make([]string, 0, 3)
	files := make([]*source, 0, 5)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		// Skip directories and files in wrong directories, get them later.
		if strings.HasSuffix(h.Name, "/") || !strings.HasPrefix(h.Name, dirPrefix) {
			continue
		}

		d, f := path.Split(h.Name)
		if utils.IsDocFile(f) && utils.FilterDirName(d) {
			// Check if it's a Go file.
			if isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
				isGoPro = true
			}

			// Get file from archive.
			b := make([]byte, h.Size)
			if _, err := io.ReadFull(tr, b); err != nil {
				return nil, err
			}

			m := md5.New()
			m.Write(b)
			hash = m.Sum(hash)

			// Check if the file is in the directory corresponding to the import path.
			if d == dirPrefix {
				// Yes.
				if !isRootPath && !isGoPro && strings.HasSuffix(f, ".go") {
					isGoPro = true
				}
				files = append(files, &source{
					name:      f,
					browseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
					data:      b})
			} else {
				sd, _ := path.Split(d[preLen:])
				sd = strings.TrimSuffix(sd, "/")
				if !checkDir(sd, dirs) {
					dirs = append(dirs, sd)
				}
			}
		}
	}

	if !isGoPro {
		return nil, NotFoundError{"Cannot find Go files, it's not a Go project."}
	}

	if len(files) == 0 && len(dirs) == 0 {
		return nil, NotFoundError{"Directory tree does not contain Go files and subdirs."}
	}

	sort.Sort(byHash(hash))
	m := md5.New()
	m.Write(hash)
	hash = m.Sum(hash[:0])
	etag := hex.EncodeToString(hash)
	if etag == savedEtag {
		return nil, errNotModified
	}

	// Start generating data.
	w := &walker{
		lineFmt: "#L%d",
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: match["repo"],
			Etag:        etag,
			Dirs:        dirs,
		},
	}
	return w.build(files)
}
Example no. 19
func getVCSDoc(client *http.Client, match map[string]string, etagSaved string) (*Package, error) {
	cmd := vcsCmds[match["vcs"]]
	if cmd == nil {
		return nil, NotFoundError{expand("VCS not supported: {vcs}", match)}
	}

	scheme := match["scheme"]
	if scheme == "" {
		i := strings.Index(etagSaved, "-")
		if i > 0 {
			scheme = etagSaved[:i]
		}
	}

	schemes := cmd.schemes
	if scheme != "" {
		for i := range cmd.schemes {
			if cmd.schemes[i] == scheme {
				schemes = cmd.schemes[i : i+1]
				break
			}
		}
	}

	// Download and checkout.

	tag, _, err := cmd.download(schemes, match["repo"], etagSaved)
	if err != nil {
		return nil, err
	}

	// Find source location.

	urlTemplate, urlMatch, lineFmt := lookupURLTemplate(match["repo"], match["dir"], tag)

	// Slurp source files.

	d := path.Join(repoRoot, expand("{repo}.{vcs}", match), match["dir"])
	f, err := os.Open(d)
	if err != nil {
		if os.IsNotExist(err) {
			err = NotFoundError{err.Error()}
		}
		return nil, err
	}
	fis, err := f.Readdir(-1)
	if err != nil {
		return nil, err
	}

	// Get source file data.
	var files []*source
	for _, fi := range fis {
		if fi.IsDir() || !utils.IsDocFile(fi.Name()) {
			continue
		}
		b, err := ioutil.ReadFile(path.Join(d, fi.Name()))
		if err != nil {
			return nil, err
		}
		files = append(files, &source{
			name:      fi.Name(),
			browseURL: expand(urlTemplate, urlMatch, fi.Name()),
			data:      b,
		})
	}

	// Start generating data.
	w := &walker{
		lineFmt: lineFmt,
		pdoc: &Package{
			ImportPath:  match["importPath"],
			ProjectName: path.Base(match["repo"]),
		},
	}
	return w.build(files)
}