Example #1
func CrawlPackage(httpClient *http.Client, pkg string) (p *Package, err error) {
	pdoc, err := doc.Get(httpClient, pkg, "")
	if err != nil {
		return nil, villa.NestErrorf(err, "CrawlPackage(%s)", pkg)
	}

	readmeFn, readmeData := "", ""
	for fn, data := range pdoc.ReadmeFiles {
		readmeFn, readmeData = fn, string(data)
		if utf8.ValidString(readmeData) {
			break
		} else {
			readmeFn, readmeData = "", ""
		}
	}

	if pdoc.Doc == "" && pdoc.Synopsis == "" {
		pdoc.Synopsis = godoc.Synopsis(readmeData)
	}

	return &Package{
		Name:       pdoc.Name,
		ImportPath: pdoc.ImportPath,
		Synopsis:   pdoc.Synopsis,
		Doc:        pdoc.Doc,
		ProjectURL: pdoc.ProjectURL,
		StarCount:  pdoc.StarCount,

		ReadmeFn:   readmeFn,
		ReadmeData: readmeData,

		Imports:    pdoc.Imports,
		References: pdoc.References,
	}, nil
}
Example #2
func (p *Package) HasDoc() bool {
	if p.dpkg != nil {
		synopsis := strings.TrimSpace(doc.Synopsis(p.dpkg.Doc))
		full := strings.Replace(strings.Replace(strings.TrimSpace(p.dpkg.Doc), "\r", "", -1), "\n", " ", -1)
		return synopsis != full
	}
	return false
}
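Example #2 effectively checks whether a package comment contains more than its first sentence. A minimal standalone sketch of the same comparison on a plain string (the comment text below is invented for illustration):

package main

import (
	"fmt"
	"go/doc"
	"strings"
)

func main() {
	text := "Package demo does one small thing.\nIt then explains that thing at much greater length."
	// Same comparison as HasDoc: the first sentence vs. the whole comment flattened to one line.
	synopsis := strings.TrimSpace(doc.Synopsis(text))
	full := strings.Replace(strings.Replace(strings.TrimSpace(text), "\r", "", -1), "\n", " ", -1)
	fmt.Println(synopsis != full) // true: there is documentation beyond the synopsis
}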
Example #3
func init() {
	playEnabled = true

	log.Println("initializing godoc ...")
	log.Printf(".zip file   = %s", zipFilename)
	log.Printf(".zip GOROOT = %s", zipGoroot)
	log.Printf("index files = %s", indexFilenames)

	// Determine file system to use.
	local.Init(zipGoroot, zipFilename, "", "")
	fs.Bind("/", local.RootFS(), "/", vfs.BindReplace)
	fs.Bind("/lib/godoc", local.StaticFS(*lang), "/", vfs.BindReplace)
	fs.Bind("/doc", local.DocumentFS(*lang), "/", vfs.BindReplace)

	corpus := godoc.NewCorpus(fs)
	corpus.Verbose = false
	corpus.MaxResults = 10000 // matches flag default in main.go
	corpus.IndexEnabled = true
	corpus.IndexFiles = indexFilenames

	// translate hook
	corpus.SummarizePackage = func(importPath string) (summary string, showList, ok bool) {
		if pkg := local.Package(*lang, importPath); pkg != nil {
			summary = doc.Synopsis(pkg.Doc)
		}
		ok = (summary != "")
		return
	}
	corpus.TranslateDocPackage = func(pkg *doc.Package) *doc.Package {
		return local.Package(*lang, pkg.ImportPath, pkg)
	}

	if err := corpus.Init(); err != nil {
		log.Fatal(err)
	}
	if corpus.IndexEnabled && corpus.IndexFiles != "" {
		go corpus.RunIndexer()
	}

	pres = godoc.NewPresentation(corpus)
	pres.TabWidth = 8
	pres.ShowPlayground = true
	pres.ShowExamples = true
	pres.DeclLinks = true
	pres.NotesRx = regexp.MustCompile("BUG")

	readTemplates(pres, true)
	registerHandlers(pres)

	log.Println("godoc initialization complete")
}
Example #4
func maybeDoc(adoc string, name string) string {
	adoc = strings.TrimRight(doc.Synopsis(adoc), ".")
	if adoc == "" {
		return ""
	}
	prefix := name + " "
	if strings.HasPrefix(adoc, prefix) {
		adoc = adoc[len(prefix):]
	} else if ap := "A " + prefix; strings.HasPrefix(adoc, ap) {
		adoc = adoc[len(ap):]
	} else if ap := "An " + prefix; strings.HasPrefix(adoc, ap) {
		adoc = adoc[len(ap):]
	}
	return " - " + adoc
}
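For instance, given the invented doc comment "Buffer is a variable-sized buffer of bytes." and the name "Buffer", maybeDoc would drop the trailing period and the leading "Buffer " prefix and return " - is a variable-sized buffer of bytes".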
Example #5
func Get(client *http.Client, importPath string, etag string) (*Package, error) {

	const versionPrefix = PackageVersion + "-"

	if strings.HasPrefix(etag, versionPrefix) {
		etag = etag[len(versionPrefix):]
	} else {
		etag = ""
	}

	dir, err := gosrc.Get(client, importPath, etag)
	if err != nil {
		return nil, err
	}

	pdoc, err := newPackage(dir)
	if err != nil {
		return pdoc, err
	}

	if pdoc.Synopsis == "" &&
		pdoc.Doc == "" &&
		!pdoc.IsCmd &&
		pdoc.Name != "" &&
		dir.ImportPath == dir.ProjectRoot &&
		len(pdoc.Errors) == 0 {
		project, err := gosrc.GetProject(client, dir.ResolvedPath)
		switch {
		case err == nil:
			pdoc.Synopsis = doc.Synopsis(project.Description)
		case gosrc.IsNotFound(err):
			// ok
		default:
			return nil, err
		}
	}

	return pdoc, nil
}
Example #6
func (x *Indexer) indexDocs(dirname string, filename string, astFile *ast.File) {
	pkgName := astFile.Name.Name
	if pkgName == "main" {
		return
	}
	astPkg := ast.Package{
		Name: pkgName,
		Files: map[string]*ast.File{
			filename: astFile,
		},
	}
	var m doc.Mode
	docPkg := doc.New(&astPkg, pathpkg.Clean(dirname), m)
	addIdent := func(sk SpotKind, name string, docstr string) {
		if x.idents[sk] == nil {
			x.idents[sk] = make(map[string][]Ident)
		}
		x.idents[sk][name] = append(x.idents[sk][name], Ident{
			Path:    pathpkg.Clean(dirname),
			Package: pkgName,
			Name:    name,
			Doc:     doc.Synopsis(docstr),
		})
	}
	for _, c := range docPkg.Consts {
		for _, name := range c.Names {
			addIdent(ConstDecl, name, c.Doc)
		}
	}
	for _, t := range docPkg.Types {
		addIdent(TypeDecl, t.Name, t.Doc)
		for _, c := range t.Consts {
			for _, name := range c.Names {
				addIdent(ConstDecl, name, c.Doc)
			}
		}
		for _, v := range t.Vars {
			for _, name := range v.Names {
				addIdent(VarDecl, name, v.Doc)
			}
		}
		for _, f := range t.Funcs {
			addIdent(FuncDecl, f.Name, f.Doc)
		}
		for _, f := range t.Methods {
			addIdent(MethodDecl, f.Name, f.Doc)
			// Change the name of methods to be "<typename>.<methodname>".
			// They will still be indexed as <methodname>.
			idents := x.idents[MethodDecl][f.Name]
			idents[len(idents)-1].Name = t.Name + "." + f.Name
		}
	}
	for _, v := range docPkg.Vars {
		for _, name := range v.Names {
			addIdent(VarDecl, name, v.Doc)
		}
	}
	for _, f := range docPkg.Funcs {
		addIdent(FuncDecl, f.Name, f.Doc)
	}
}
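Example #6 wraps a single parsed file in a throwaway ast.Package so that go/doc can compute per-identifier documentation, then stores doc.Synopsis of each comment. A self-contained sketch of that pattern; the demo package, the Greeter type and the import path are invented:

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

const src = `// Package demo is a tiny example.
package demo

// Greeter says hello.
// A longer description would follow here.
type Greeter struct{}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// One-file ast.Package, as in indexDocs above.
	astPkg := &ast.Package{
		Name:  file.Name.Name,
		Files: map[string]*ast.File{"demo.go": file},
	}
	docPkg := doc.New(astPkg, "example.com/demo", 0)
	for _, t := range docPkg.Types {
		// prints "Greeter: Greeter says hello."
		fmt.Printf("%s: %s\n", t.Name, doc.Synopsis(t.Doc))
	}
}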
Example #7
func (b *treeBuilder) newDirTree(fset *token.FileSet, path, name string, depth int) *Directory {
	if b.pathFilter != nil && !b.pathFilter(path) {
		return nil
	}

	if depth >= b.maxDepth {
		// return a dummy directory so that the parent directory
		// doesn't get discarded just because we reached the max
		// directory depth
		return &Directory{depth, path, name, "", nil}
	}

	list, err := fs.ReadDir(path)
	if err != nil {
		// newDirTree is called with a path that should be a package
		// directory; errors here should not happen, but if they do,
		// we want to know about them
		log.Printf("ReadDir(%s): %s", path, err)
	}

	// determine number of subdirectories and if there are package files
	ndirs := 0
	hasPkgFiles := false
	var synopses [4]string // prioritized package documentation (0 == highest priority)
	for _, d := range list {
		switch {
		case isPkgDir(d):
			ndirs++
		case isPkgFile(d):
			// looks like a package file, but may just be a file ending in ".go";
			// don't just count it yet (otherwise we may end up with hasPkgFiles even
			// though the directory doesn't contain any real package files - was bug)
			if synopses[0] == "" {
				// no "optimal" package synopsis yet; continue to collect synopses
				//file, err := parseFile(fset, filepath.Join(path, d.Name()),
				//parser.ParseComments|parser.PackageClauseOnly)
				file, err := parser.ParseFile(fset, filepath.Join(path, d.Name()), nil,
					parser.ParseComments|parser.PackageClauseOnly)

				if err == nil {
					hasPkgFiles = true
					if file.Doc != nil {
						// prioritize documentation
						i := -1
						switch file.Name.Name {
						case name:
							i = 0 // normal case: directory name matches package name
						case fakePkgName:
							i = 1 // synopses for commands
						case "main":
							i = 2 // directory contains a main package
						default:
							i = 3 // none of the above
						}
						if 0 <= i && i < len(synopses) && synopses[i] == "" {
							synopses[i] = doc.Synopsis(file.Doc.Text())
						}
					}
				}
			}
		}
	}

	// create subdirectory tree
	var dirs []*Directory
	if ndirs > 0 {
		dirs = make([]*Directory, ndirs)
		i := 0
		for _, d := range list {
			if isPkgDir(d) {
				name := d.Name()
				dd := b.newDirTree(fset, filepath.Join(path, name), name, depth+1)
				if dd != nil {
					dirs[i] = dd
					i++
				}
			}
		}
		dirs = dirs[0:i]
	}

	// if there are no package files and no subdirectories
	// containing package files, ignore the directory
	if !hasPkgFiles && len(dirs) == 0 {
		return nil
	}

	// select the highest-priority synopsis for the directory entry, if any
	synopsis := ""
	for _, synopsis = range synopses {
		if synopsis != "" {
			break
		}
	}

	return &Directory{depth, path, name, synopsis, dirs}
}
Example #8
func runGodoc() {
	// Determine file system to use.
	local.Init(*goroot, *zipfile, *templateDir, build.Default.GOPATH)
	fs.Bind("/", local.RootFS(), "/", vfs.BindReplace)
	fs.Bind("/lib/godoc", local.StaticFS(*lang), "/", vfs.BindReplace)
	fs.Bind("/doc", local.DocumentFS(*lang), "/", vfs.BindReplace)

	httpMode := *httpAddr != ""

	var typeAnalysis, pointerAnalysis bool
	if *analysisFlag != "" {
		for _, a := range strings.Split(*analysisFlag, ",") {
			switch a {
			case "type":
				typeAnalysis = true
			case "pointer":
				pointerAnalysis = true
			default:
				log.Fatalf("unknown analysis: %s", a)
			}
		}
	}

	corpus := godoc.NewCorpus(fs)

	// translate hook
	corpus.SummarizePackage = func(importPath string) (summary string, showList, ok bool) {
		if pkg := local.Package(*lang, importPath); pkg != nil {
			summary = doc.Synopsis(pkg.Doc)
		}
		ok = (summary != "")
		return
	}
	corpus.TranslateDocPackage = func(pkg *doc.Package) *doc.Package {
		return local.Package(*lang, pkg.ImportPath, pkg)
	}

	corpus.Verbose = *verbose
	corpus.MaxResults = *maxResults
	corpus.IndexEnabled = *indexEnabled && httpMode
	if *maxResults == 0 {
		corpus.IndexFullText = false
	}
	corpus.IndexFiles = *indexFiles
	corpus.IndexThrottle = *indexThrottle
	if *writeIndex {
		corpus.IndexThrottle = 1.0
		corpus.IndexEnabled = true
	}
	if *writeIndex || httpMode || *urlFlag != "" {
		if err := corpus.Init(); err != nil {
			log.Fatal(err)
		}
	}

	pres = godoc.NewPresentation(corpus)
	pres.TabWidth = *tabWidth
	pres.ShowTimestamps = *showTimestamps
	pres.ShowPlayground = *showPlayground
	pres.ShowExamples = *showExamples
	pres.DeclLinks = *declLinks
	pres.SrcMode = *srcMode
	pres.HTMLMode = *html
	if *notesRx != "" {
		pres.NotesRx = regexp.MustCompile(*notesRx)
	}

	readTemplates(pres, httpMode || *urlFlag != "")
	registerHandlers(pres)

	if *writeIndex {
		// Write search index and exit.
		if *indexFiles == "" {
			log.Fatal("no index file specified")
		}

		log.Println("initialize file systems")
		*verbose = true // want to see what happens

		corpus.UpdateIndex()

		log.Println("writing index file", *indexFiles)
		f, err := os.Create(*indexFiles)
		if err != nil {
			log.Fatal(err)
		}
		index, _ := corpus.CurrentIndex()
		_, err = index.WriteTo(f)
		if err != nil {
			log.Fatal(err)
		}

		log.Println("done")
		return
	}

	// Print content that would be served at the URL *urlFlag.
	if *urlFlag != "" {
		handleURLFlag()
		return
	}

	if httpMode {
		// HTTP server mode.
		var handler http.Handler = http.DefaultServeMux
		if *verbose {
			log.Printf("Go Documentation Server")
			log.Printf("version = %s", runtime.Version())
			log.Printf("address = %s", *httpAddr)
			log.Printf("goroot = %s", *goroot)
			log.Printf("tabwidth = %d", *tabWidth)
			switch {
			case !*indexEnabled:
				log.Print("search index disabled")
			case *maxResults > 0:
				log.Printf("full text index enabled (maxresults = %d)", *maxResults)
			default:
				log.Print("identifier search index enabled")
			}
			handler = loggingHandler(handler)
		}

		// Initialize search index.
		if *indexEnabled {
			go corpus.RunIndexer()
		}

		// Start type/pointer analysis.
		if typeAnalysis || pointerAnalysis {
			go analysis.Run(pointerAnalysis, &corpus.Analysis)
		}

		// Start http server.
		if err := http.ListenAndServe(*httpAddr, handler); err != nil {
			log.Fatalf("ListenAndServe %s: %v", *httpAddr, err)
		}

		return
	}

	if *query {
		handleRemoteSearch()
		return
	}

	if err := godoc.CommandLine(os.Stdout, fs, pres, flag.Args()); err != nil {
		log.Print(err)
	}
}
Example #9
func CrawlPackage(httpClient doc.HttpClient, pkg string, etag string) (p *Package, folders []*sppb.FolderInfo, err error) {
	defer func() {
		if perr := recover(); perr != nil {
			p, folders, err = nil, nil, errorsp.NewWithStacks("Panic when crawling package %s: %v", pkg, perr)
		}
	}()
	var pdoc *doc.Package

	if strings.HasPrefix(pkg, "thezombie.net") {
		return nil, folders, ErrInvalidPackage
	} else if strings.HasPrefix(pkg, "github.com/") {
		if GithubSpider != nil {
			pdoc, folders, err = getGithub(pkg)
		} else {
			pdoc, err = doc.Get(httpClient, pkg, etag)
		}
	} else {
		pdoc, err = newDocGet(httpClient, pkg, etag)
	}
	if err == doc.ErrNotModified {
		return nil, folders, ErrPackageNotModifed
	}
	if err != nil {
		return nil, folders, errorsp.WithStacks(err)
	}
	if pdoc.StarCount < 0 {
		// if the star count was not fetched, fuse the Plusone and
		// Like Button counts
		plus, like := -1, -1
		if starCount, err := Plusone(httpClient, pdoc.ProjectURL); err == nil {
			plus = starCount
		}
		if starCount, err := LikeButton(httpClient, pdoc.ProjectURL); err == nil {
			like = starCount
		}
		pdoc.StarCount = fuseStars(plus, like)
	}
	readmeFn, readmeData := "", ""
	for fn, data := range pdoc.ReadmeFiles {
		readmeFn, readmeData = strings.TrimSpace(fn),
			strings.TrimSpace(string(data))
		if len(readmeData) > 1 && utf8.ValidString(readmeData) {
			break
		} else {
			readmeFn, readmeData = "", ""
		}
	}
	// try to find a synopsis in the readme
	if pdoc.Doc == "" && pdoc.Synopsis == "" {
		pdoc.Synopsis = godoc.Synopsis(ReadmeToText(readmeFn, readmeData))
	}
	if len(readmeData) > 100*1024 {
		readmeData = readmeData[:100*1024]
	}
	importsSet := stringsp.NewSet(pdoc.Imports...)
	importsSet.Delete(pdoc.ImportPath)
	imports := importsSet.Elements()
	testImports := stringsp.NewSet(pdoc.TestImports...)
	testImports.Add(pdoc.XTestImports...)
	testImports.Delete(imports...)
	testImports.Delete(pdoc.ImportPath)

	var exported stringsp.Set
	for _, f := range pdoc.Funcs {
		exported.Add(f.Name)
	}
	for _, t := range pdoc.Types {
		exported.Add(t.Name)
	}
	return &Package{
		Package:    pdoc.ImportPath,
		Name:       pdoc.Name,
		Synopsis:   pdoc.Synopsis,
		Doc:        pdoc.Doc,
		ProjectURL: pdoc.ProjectURL,
		StarCount:  pdoc.StarCount,

		ReadmeFn:   readmeFn,
		ReadmeData: readmeData,

		Imports:     imports,
		TestImports: testImports.Elements(),
		Exported:    exported.Elements(),

		References: pdoc.References,
		Etag:       pdoc.Etag,
	}, folders, nil
}
Example #10
func (b *treeBuilder) newDirTree(fset *token.FileSet, path, name string, depth int) *Directory {
	if name == testdataDirName {
		return nil
	}

	if depth >= b.maxDepth {
		// return a dummy directory so that the parent directory
		// doesn't get discarded just because we reached the max
		// directory depth
		return &Directory{
			Depth: depth,
			Path:  path,
			Name:  name,
		}
	}

	var synopses [3]string // prioritized package documentation (0 == highest priority)

	show := true // show in package listing
	hasPkgFiles := false
	haveSummary := false

	if hook := b.c.SummarizePackage; hook != nil {
		if summary, show0, ok := hook(strings.TrimPrefix(path, "/src/")); ok {
			hasPkgFiles = true
			show = show0
			synopses[0] = summary
			haveSummary = true
		}
	}

	list, _ := b.c.fs.ReadDir(path)

	// determine number of subdirectories and if there are package files
	var dirchs []chan *Directory

	for _, d := range list {
		filename := pathpkg.Join(path, d.Name())
		switch {
		case isPkgDir(d):
			ch := make(chan *Directory, 1)
			dirchs = append(dirchs, ch)
			name := d.Name()
			go func() {
				ch <- b.newDirTree(fset, filename, name, depth+1)
			}()
		case !haveSummary && isPkgFile(d):
			// looks like a package file, but may just be a file ending in ".go";
			// don't just count it yet (otherwise we may end up with hasPkgFiles even
			// though the directory doesn't contain any real package files - was bug)
			// no "optimal" package synopsis yet; continue to collect synopses
			parseFileGate <- true
			const flags = parser.ParseComments | parser.PackageClauseOnly
			file, err := b.c.parseFile(fset, filename, flags)
			<-parseFileGate
			if err != nil {
				if b.c.Verbose {
					log.Printf("Error parsing %v: %v", filename, err)
				}
				break
			}

			hasPkgFiles = true
			if file.Doc != nil {
				// prioritize documentation
				i := -1
				switch file.Name.Name {
				case name:
					i = 0 // normal case: directory name matches package name
				case "main":
					i = 1 // directory contains a main package
				default:
					i = 2 // none of the above
				}
				if 0 <= i && i < len(synopses) && synopses[i] == "" {
					synopses[i] = doc.Synopsis(file.Doc.Text())
				}
			}
			haveSummary = synopses[0] != ""
		}
	}

	// create subdirectory tree
	var dirs []*Directory
	for _, ch := range dirchs {
		if d := <-ch; d != nil {
			dirs = append(dirs, d)
		}
	}

	// if there are no package files and no subdirectories
	// containing package files, ignore the directory
	if !hasPkgFiles && len(dirs) == 0 {
		return nil
	}

	// select the highest-priority synopsis for the directory entry, if any
	synopsis := ""
	for _, synopsis = range synopses {
		if synopsis != "" {
			break
		}
	}

	return &Directory{
		Depth:    depth,
		Path:     path,
		Name:     name,
		HasPkg:   hasPkgFiles && show, // TODO(bradfitz): add proper Hide field?
		Synopsis: synopsis,
		Dirs:     dirs,
	}
}
Example #11
func CrawlPackage(httpClient *http.Client, pkg string, etag string) (p *Package, err error) {
	pdoc, err := doc.Get(httpClient, pkg, etag)
	if err == doc.ErrNotModified {
		return nil, ErrPackageNotModifed
	}
	if err != nil {
		return nil, villa.NestErrorf(err, "CrawlPackage(%s)", pkg)
	}

	if pdoc.StarCount < 0 {
		// if the star count was not fetched, fuse the Plusone and LikeButton counts
		plus, like := -1, -1
		if starCount, err := Plusone(httpClient, pdoc.ProjectURL); err == nil {
			plus = starCount
		}
		if starCount, err := LikeButton(httpClient, pdoc.ProjectURL); err == nil {
			like = starCount
		}
		pdoc.StarCount = fuseStars(plus, like)
	}

	readmeFn, readmeData := "", ""
	for fn, data := range pdoc.ReadmeFiles {
		readmeFn, readmeData = strings.TrimSpace(fn), strings.TrimSpace(string(data))
		if len(readmeData) > 1 && utf8.ValidString(readmeData) {
			break
		} else {
			readmeFn, readmeData = "", ""
		}
	}

	// try to find a synopsis in the readme
	if pdoc.Doc == "" && pdoc.Synopsis == "" {
		pdoc.Synopsis = godoc.Synopsis(ReadmeToText(readmeFn, readmeData))
	}

	if len(readmeData) > 100*1024 {
		readmeData = readmeData[:100*1024]
	}

	imports := villa.NewStrSet(pdoc.Imports...)
	imports.Put(pdoc.TestImports...)
	imports.Put(pdoc.XTestImports...)

	var exported villa.StrSet
	for _, f := range pdoc.Funcs {
		exported.Put(f.Name)
	}
	for _, t := range pdoc.Types {
		exported.Put(t.Name)
	}

	return &Package{
		Package:    pdoc.ImportPath,
		Name:       pdoc.Name,
		Synopsis:   pdoc.Synopsis,
		Doc:        pdoc.Doc,
		ProjectURL: pdoc.ProjectURL,
		StarCount:  pdoc.StarCount,
		ReadmeFn:   readmeFn,
		ReadmeData: readmeData,
		Imports:    imports.Elements(),
		Exported:   exported.Elements(),

		References: pdoc.References,
		Etag:       pdoc.Etag,
	}, nil
}
Example #12
func (x *Indexer) indexDocs(dirname string, filename string, astFile *ast.File) {
	pkgName := x.intern(astFile.Name.Name)
	if pkgName == "main" {
		return
	}
	pkgPath := x.intern(strings.TrimPrefix(strings.TrimPrefix(dirname, "/src/"), "pkg/"))
	astPkg := ast.Package{
		Name: pkgName,
		Files: map[string]*ast.File{
			filename: astFile,
		},
	}
	var m doc.Mode
	docPkg := doc.New(&astPkg, dirname, m)
	addIdent := func(sk SpotKind, name string, docstr string) {
		if x.idents[sk] == nil {
			x.idents[sk] = make(map[string][]Ident)
		}
		name = x.intern(name)
		x.idents[sk][name] = append(x.idents[sk][name], Ident{
			Path:    pkgPath,
			Package: pkgName,
			Name:    name,
			Doc:     doc.Synopsis(docstr),
		})
	}

	if x.idents[PackageClause] == nil {
		x.idents[PackageClause] = make(map[string][]Ident)
	}
	// List of words under which the package identifier will be stored.
	// This includes the package name and the components of the directory
	// in which it resides.
	words := strings.Split(pathpkg.Dir(pkgPath), "/")
	if words[0] == "." {
		words = []string{}
	}
	name := x.intern(docPkg.Name)
	synopsis := doc.Synopsis(docPkg.Doc)
	words = append(words, name)
	pkgIdent := Ident{
		Path:    pkgPath,
		Package: pkgName,
		Name:    name,
		Doc:     synopsis,
	}
	for _, word := range words {
		word = x.intern(word)
		found := false
		pkgs := x.idents[PackageClause][word]
		for i, p := range pkgs {
			if p.Path == pkgPath {
				if docPkg.Doc != "" {
					p.Doc = synopsis
					pkgs[i] = p
				}
				found = true
				break
			}
		}
		if !found {
			x.idents[PackageClause][word] = append(x.idents[PackageClause][word], pkgIdent)
		}
	}

	for _, c := range docPkg.Consts {
		for _, name := range c.Names {
			addIdent(ConstDecl, name, c.Doc)
		}
	}
	for _, t := range docPkg.Types {
		addIdent(TypeDecl, t.Name, t.Doc)
		for _, c := range t.Consts {
			for _, name := range c.Names {
				addIdent(ConstDecl, name, c.Doc)
			}
		}
		for _, v := range t.Vars {
			for _, name := range v.Names {
				addIdent(VarDecl, name, v.Doc)
			}
		}
		for _, f := range t.Funcs {
			addIdent(FuncDecl, f.Name, f.Doc)
		}
		for _, f := range t.Methods {
			addIdent(MethodDecl, f.Name, f.Doc)
			// Change the name of methods to be "<typename>.<methodname>".
			// They will still be indexed as <methodname>.
			idents := x.idents[MethodDecl][f.Name]
			idents[len(idents)-1].Name = x.intern(t.Name + "." + f.Name)
		}
	}
	for _, v := range docPkg.Vars {
		for _, name := range v.Names {
			addIdent(VarDecl, name, v.Doc)
		}
	}
	for _, f := range docPkg.Funcs {
		addIdent(FuncDecl, f.Name, f.Doc)
	}
}
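Example #12 goes further than Example #6 and also indexes the package clause itself, under the package name and under every component of its directory path. A small sketch of just that word-list construction; the path net/http merely stands in for a pkgPath with the /src/ prefix already trimmed:

package main

import (
	"fmt"
	pathpkg "path"
	"strings"
)

func main() {
	pkgPath := "net/http"
	words := strings.Split(pathpkg.Dir(pkgPath), "/")
	if words[0] == "." {
		// top-level package: no directory components to index under
		words = []string{}
	}
	words = append(words, "http") // the package name itself
	fmt.Println(words)            // [net http]: the package ident is stored under each of these words
}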
Example #13
// Import returns details about the Go package named by the import path,
// interpreting local import paths relative to the src directory.  If the path
// is a local import path naming a package that can be imported using a
// standard import path, the returned package will set p.ImportPath to
// that path.
//
// In the directory containing the package, .go, .c, .h, and .s files are
// considered part of the package except for:
//
//	- .go files in package documentation
//	- files starting with _ or .
//	- files with build constraints not satisfied by the context
//
// If an error occurs, Import returns a non-nil error and a non-nil
// *Package containing partial information.
//
func (ctxt *Context) Import(path string, src string, mode ImportMode) (*Package, error) {
	p := &Package{
		ImportPath: path,
	}

	var pkga string
	if ctxt.Gccgo {
		dir, elem := pathpkg.Split(p.ImportPath)
		pkga = "pkg/gccgo/" + dir + "lib" + elem + ".a"
	} else {
		pkga = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + "/" + p.ImportPath + ".a"
	}

	binaryOnly := false
	if IsLocalImport(path) {
		if src == "" {
			return p, fmt.Errorf("import %q: import relative to unknown directory", path)
		}
		if !ctxt.isAbsPath(path) {
			p.Dir = ctxt.joinPath(src, path)
		}
		// Determine canonical import path, if any.
		if ctxt.GOROOT != "" {
			root := ctxt.joinPath(ctxt.GOROOT, "src", "pkg")
			if sub, ok := ctxt.hasSubdir(root, p.Dir); ok {
				p.Goroot = true
				p.ImportPath = sub
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		all := ctxt.gopath()
		for i, root := range all {
			rootsrc := ctxt.joinPath(root, "src")
			if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok {
				// We found a potential import path for dir,
				// but check that using it wouldn't find something
				// else first.
				if ctxt.GOROOT != "" {
					if dir := ctxt.joinPath(ctxt.GOROOT, "src", sub); ctxt.isDir(dir) {
						goto Found
					}
				}
				for _, earlyRoot := range all[:i] {
					if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
						goto Found
					}
				}

				// sub would not name some other directory instead of this one.
				// Record it.
				p.ImportPath = sub
				p.Root = root
				goto Found
			}
		}
		// It's okay that we didn't find a root containing dir.
		// Keep going with the information we have.
	} else {
		if strings.HasPrefix(path, "/") {
			return p, fmt.Errorf("import %q: cannot import absolute path", path)
		}
		// Determine directory from import path.
		if ctxt.GOROOT != "" {
			dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Goroot = true
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		for _, root := range ctxt.gopath() {
			dir := ctxt.joinPath(root, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && ctxt.isFile(ctxt.joinPath(root, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Root = root
				goto Found
			}
		}
		return p, fmt.Errorf("import %q: cannot find package", path)
	}

Found:
	if p.Root != "" {
		if p.Goroot {
			p.SrcRoot = ctxt.joinPath(p.Root, "src", "pkg")
		} else {
			p.SrcRoot = ctxt.joinPath(p.Root, "src")
		}
		p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
		p.BinDir = ctxt.joinPath(p.Root, "bin")
		p.PkgObj = ctxt.joinPath(p.Root, pkga)
	}

	if mode&FindOnly != 0 {
		return p, nil
	}
	if binaryOnly && (mode&AllowBinary) != 0 {
		return p, nil
	}

	dirs, err := ctxt.readDir(p.Dir)
	if err != nil {
		return p, err
	}

	var Sfiles []string // files with ".S" (capital S)
	var firstFile string
	imported := make(map[string][]token.Position)
	testImported := make(map[string][]token.Position)
	xTestImported := make(map[string][]token.Position)
	fset := token.NewFileSet()
	for _, d := range dirs {
		if d.IsDir() {
			continue
		}
		name := d.Name()
		if strings.HasPrefix(name, "_") ||
			strings.HasPrefix(name, ".") {
			continue
		}
		if !ctxt.UseAllFiles && !ctxt.goodOSArchFile(name) {
			continue
		}

		i := strings.LastIndex(name, ".")
		if i < 0 {
			i = len(name)
		}
		ext := name[i:]
		switch ext {
		case ".go", ".c", ".s", ".h", ".S":
			// tentatively okay
		default:
			// skip
			continue
		}

		filename := ctxt.joinPath(p.Dir, name)
		f, err := ctxt.openFile(filename)
		if err != nil {
			return p, err
		}
		data, err := ioutil.ReadAll(f)
		f.Close()
		if err != nil {
			return p, fmt.Errorf("read %s: %v", filename, err)
		}

		// Look for +build comments to accept or reject the file.
		if !ctxt.UseAllFiles && !ctxt.shouldBuild(data) {
			continue
		}

		// Going to save the file.  For non-Go files, can stop here.
		switch ext {
		case ".c":
			p.CFiles = append(p.CFiles, name)
			continue
		case ".h":
			p.HFiles = append(p.HFiles, name)
			continue
		case ".s":
			p.SFiles = append(p.SFiles, name)
			continue
		case ".S":
			Sfiles = append(Sfiles, name)
			continue
		}

		pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
		if err != nil {
			return p, err
		}

		pkg := string(pf.Name.Name)
		if pkg == "documentation" {
			continue
		}

		isTest := strings.HasSuffix(name, "_test.go")
		isXTest := false
		if isTest && strings.HasSuffix(pkg, "_test") {
			isXTest = true
			pkg = pkg[:len(pkg)-len("_test")]
		}

		if p.Name == "" {
			p.Name = pkg
			firstFile = name
		} else if pkg != p.Name {
			return p, fmt.Errorf("found packages %s (%s) and %s (%s) in %s", p.Name, firstFile, pkg, name, p.Dir)
		}
		if pf.Doc != nil && p.Doc == "" {
			p.Doc = doc.Synopsis(pf.Doc.Text())
		}

		// Record imports and information about cgo.
		isCgo := false
		for _, decl := range pf.Decls {
			d, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}
			for _, dspec := range d.Specs {
				spec, ok := dspec.(*ast.ImportSpec)
				if !ok {
					continue
				}
				quoted := string(spec.Path.Value)
				path, err := strconv.Unquote(quoted)
				if err != nil {
					log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted)
				}
				if isXTest {
					xTestImported[path] = append(xTestImported[path], fset.Position(spec.Pos()))
				} else if isTest {
					testImported[path] = append(testImported[path], fset.Position(spec.Pos()))
				} else {
					imported[path] = append(imported[path], fset.Position(spec.Pos()))
				}
				if path == "C" {
					if isTest {
						return p, fmt.Errorf("use of cgo in test %s not supported", filename)
					}
					cg := spec.Doc
					if cg == nil && len(d.Specs) == 1 {
						cg = d.Doc
					}
					if cg != nil {
						if err := ctxt.saveCgo(filename, p, cg); err != nil {
							return p, err
						}
					}
					isCgo = true
				}
			}
		}
		if isCgo {
			if ctxt.CgoEnabled {
				p.CgoFiles = append(p.CgoFiles, name)
			}
		} else if isXTest {
			p.XTestGoFiles = append(p.XTestGoFiles, name)
		} else if isTest {
			p.TestGoFiles = append(p.TestGoFiles, name)
		} else {
			p.GoFiles = append(p.GoFiles, name)
		}
	}
	if p.Name == "" {
		return p, fmt.Errorf("no Go source files in %s", p.Dir)
	}

	p.Imports, p.ImportPos = cleanImports(imported)
	p.TestImports, p.TestImportPos = cleanImports(testImported)
	p.XTestImports, p.XTestImportPos = cleanImports(xTestImported)

	// add the .S files only if we are using cgo
	// (which means gcc will compile them).
	// The standard assemblers expect .s files.
	if len(p.CgoFiles) > 0 {
		p.SFiles = append(p.SFiles, Sfiles...)
		sort.Strings(p.SFiles)
	}

	return p, nil
}
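Example #13 (and the later variants in Examples #15 and #16) is go/build's Context.Import, which fills Package.Doc with doc.Synopsis of the package comment. A minimal caller-side sketch using the package-level build.Import, which delegates to the default Context; printing the synopsis of the standard fmt package is only an illustration:

package main

import (
	"fmt"
	"go/build"
)

func main() {
	pkg, err := build.Import("fmt", "", 0)
	if err != nil {
		panic(err)
	}
	// pkg.Doc holds the doc.Synopsis of the package comment.
	fmt.Println(pkg.Name, "-", pkg.Doc)
}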
Example #14
func (p *Package) Synopsis() string {
	if p.dpkg != nil {
		return doc.Synopsis(p.dpkg.Doc)
	}
	return ""
}
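Example #14 is the simplest case: the package synopsis is just doc.Synopsis of the package comment. A standalone sketch of that call; the comment text is the familiar opening of the bytes package documentation, used here only as sample input:

package main

import (
	"fmt"
	"go/doc"
)

func main() {
	text := "Package bytes implements functions for the manipulation of byte slices.\nIt is analogous to the facilities of the strings package."
	// Synopsis returns a cleaned copy of the first sentence only:
	// "Package bytes implements functions for the manipulation of byte slices."
	fmt.Println(doc.Synopsis(text))
}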
Example #15
// Import returns details about the Go package named by the import path,
// interpreting local import paths relative to the srcDir directory.
// If the path is a local import path naming a package that can be imported
// using a standard import path, the returned package will set p.ImportPath
// to that path.
//
// In the directory containing the package, .go, .c, .h, and .s files are
// considered part of the package except for:
//
//	- .go files in package documentation
//	- files starting with _ or . (likely editor temporary files)
//	- files with build constraints not satisfied by the context
//
// If an error occurs, Import returns a non-nil error and a non-nil
// *Package containing partial information.
//
func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
	p := &Package{
		ImportPath: path,
	}
	if path == "" {
		return p, fmt.Errorf("import %q: invalid import path", path)
	}

	var pkgtargetroot string
	var pkga string
	var pkgerr error
	suffix := ""
	if ctxt.InstallSuffix != "" {
		suffix = "_" + ctxt.InstallSuffix
	}
	switch ctxt.Compiler {
	case "gccgo":
		pkgtargetroot = "pkg/gccgo_" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
	case "gc":
		pkgtargetroot = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
	default:
		// Save error for end of function.
		pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
	}
	setPkga := func() {
		switch ctxt.Compiler {
		case "gccgo":
			dir, elem := pathpkg.Split(p.ImportPath)
			pkga = pkgtargetroot + "/" + dir + "lib" + elem + ".a"
		case "gc":
			pkga = pkgtargetroot + "/" + p.ImportPath + ".a"
		}
	}
	setPkga()

	binaryOnly := false
	if IsLocalImport(path) {
		pkga = "" // local imports have no installed path
		if srcDir == "" {
			return p, fmt.Errorf("import %q: import relative to unknown directory", path)
		}
		if !ctxt.isAbsPath(path) {
			p.Dir = ctxt.joinPath(srcDir, path)
		}
		// Determine canonical import path, if any.
		// Exclude results where the import path would include /testdata/.
		inTestdata := func(sub string) bool {
			return strings.Contains(sub, "/testdata/") || strings.HasSuffix(sub, "/testdata") || strings.HasPrefix(sub, "testdata/") || sub == "testdata"
		}
		if ctxt.GOROOT != "" {
			root := ctxt.joinPath(ctxt.GOROOT, "src")
			if sub, ok := ctxt.hasSubdir(root, p.Dir); ok && !inTestdata(sub) {
				p.Goroot = true
				p.ImportPath = sub
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		all := ctxt.gopath()
		for i, root := range all {
			rootsrc := ctxt.joinPath(root, "src")
			if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok && !inTestdata(sub) {
				// We found a potential import path for dir,
				// but check that using it wouldn't find something
				// else first.
				if ctxt.GOROOT != "" {
					if dir := ctxt.joinPath(ctxt.GOROOT, "src", sub); ctxt.isDir(dir) {
						p.ConflictDir = dir
						goto Found
					}
				}
				for _, earlyRoot := range all[:i] {
					if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
						p.ConflictDir = dir
						goto Found
					}
				}

				// sub would not name some other directory instead of this one.
				// Record it.
				p.ImportPath = sub
				p.Root = root
				goto Found
			}
		}
		// It's okay that we didn't find a root containing dir.
		// Keep going with the information we have.
	} else {
		if strings.HasPrefix(path, "/") {
			return p, fmt.Errorf("import %q: cannot import absolute path", path)
		}

		// tried records the location of unsuccessful package lookups
		var tried struct {
			vendor []string
			goroot string
			gopath []string
		}
		gopath := ctxt.gopath()

		// Vendor directories get first chance to satisfy import.
		if mode&AllowVendor != 0 && srcDir != "" {
			searchVendor := func(root string, isGoroot bool) bool {
				sub, ok := ctxt.hasSubdir(root, srcDir)
				if !ok || !strings.HasPrefix(sub, "src/") || strings.Contains(sub, "/testdata/") {
					return false
				}
				for {
					vendor := ctxt.joinPath(root, sub, "vendor")
					if ctxt.isDir(vendor) {
						dir := ctxt.joinPath(vendor, path)
						if ctxt.isDir(dir) {
							p.Dir = dir
							p.ImportPath = strings.TrimPrefix(pathpkg.Join(sub, "vendor", path), "src/")
							p.Goroot = isGoroot
							p.Root = root
							setPkga() // p.ImportPath changed
							return true
						}
						tried.vendor = append(tried.vendor, dir)
					}
					i := strings.LastIndex(sub, "/")
					if i < 0 {
						break
					}
					sub = sub[:i]
				}
				return false
			}
			if searchVendor(ctxt.GOROOT, true) {
				goto Found
			}
			for _, root := range gopath {
				if searchVendor(root, false) {
					goto Found
				}
			}
		}

		// Determine directory from import path.
		if ctxt.GOROOT != "" {
			dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Goroot = true
				p.Root = ctxt.GOROOT
				goto Found
			}
			tried.goroot = dir
		}
		for _, root := range gopath {
			dir := ctxt.joinPath(root, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Root = root
				goto Found
			}
			tried.gopath = append(tried.gopath, dir)
		}

		// package was not found
		var paths []string
		format := "\t%s (vendor tree)"
		for _, dir := range tried.vendor {
			paths = append(paths, fmt.Sprintf(format, dir))
			format = "\t%s"
		}
		if tried.goroot != "" {
			paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot))
		} else {
			paths = append(paths, "\t($GOROOT not set)")
		}
		format = "\t%s (from $GOPATH)"
		for _, dir := range tried.gopath {
			paths = append(paths, fmt.Sprintf(format, dir))
			format = "\t%s"
		}
		if len(tried.gopath) == 0 {
			paths = append(paths, "\t($GOPATH not set)")
		}
		return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n"))
	}

Found:
	if p.Root != "" {
		p.SrcRoot = ctxt.joinPath(p.Root, "src")
		p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
		p.BinDir = ctxt.joinPath(p.Root, "bin")
		if pkga != "" {
			p.PkgTargetRoot = ctxt.joinPath(p.Root, pkgtargetroot)
			p.PkgObj = ctxt.joinPath(p.Root, pkga)
		}
	}

	if mode&FindOnly != 0 {
		return p, pkgerr
	}
	if binaryOnly && (mode&AllowBinary) != 0 {
		return p, pkgerr
	}

	dirs, err := ctxt.readDir(p.Dir)
	if err != nil {
		return p, err
	}

	var badGoError error
	var Sfiles []string // files with ".S" (capital S)
	var firstFile, firstCommentFile string
	imported := make(map[string][]token.Position)
	testImported := make(map[string][]token.Position)
	xTestImported := make(map[string][]token.Position)
	allTags := make(map[string]bool)
	fset := token.NewFileSet()
	for _, d := range dirs {
		if d.IsDir() {
			continue
		}

		name := d.Name()
		ext := nameExt(name)

		badFile := func(err error) {
			if badGoError == nil {
				badGoError = err
			}
			p.InvalidGoFiles = append(p.InvalidGoFiles, name)
		}

		match, data, filename, err := ctxt.matchFile(p.Dir, name, true, allTags)
		if err != nil {
			badFile(err)
			continue
		}
		if !match {
			if ext == ".go" {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
			continue
		}

		// Going to save the file.  For non-Go files, can stop here.
		switch ext {
		case ".c":
			p.CFiles = append(p.CFiles, name)
			continue
		case ".cc", ".cpp", ".cxx":
			p.CXXFiles = append(p.CXXFiles, name)
			continue
		case ".m":
			p.MFiles = append(p.MFiles, name)
			continue
		case ".h", ".hh", ".hpp", ".hxx":
			p.HFiles = append(p.HFiles, name)
			continue
		case ".s":
			p.SFiles = append(p.SFiles, name)
			continue
		case ".S":
			Sfiles = append(Sfiles, name)
			continue
		case ".swig":
			p.SwigFiles = append(p.SwigFiles, name)
			continue
		case ".swigcxx":
			p.SwigCXXFiles = append(p.SwigCXXFiles, name)
			continue
		case ".syso":
			// binary objects to add to package archive
			// Likely of the form foo_windows.syso, but
			// the name was vetted above with goodOSArchFile.
			p.SysoFiles = append(p.SysoFiles, name)
			continue
		}

		pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
		if err != nil {
			badFile(err)
			continue
		}

		pkg := pf.Name.Name
		if pkg == "documentation" {
			p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			continue
		}

		isTest := strings.HasSuffix(name, "_test.go")
		isXTest := false
		if isTest && strings.HasSuffix(pkg, "_test") {
			isXTest = true
			pkg = pkg[:len(pkg)-len("_test")]
		}

		if p.Name == "" {
			p.Name = pkg
			firstFile = name
		} else if pkg != p.Name {
			badFile(&MultiplePackageError{
				Dir:      p.Dir,
				Packages: []string{p.Name, pkg},
				Files:    []string{firstFile, name},
			})
			p.InvalidGoFiles = append(p.InvalidGoFiles, name)
		}
		if pf.Doc != nil && p.Doc == "" {
			p.Doc = doc.Synopsis(pf.Doc.Text())
		}

		if mode&ImportComment != 0 {
			qcom, line := findImportComment(data)
			if line != 0 {
				com, err := strconv.Unquote(qcom)
				if err != nil {
					badFile(fmt.Errorf("%s:%d: cannot parse import comment", filename, line))
				} else if p.ImportComment == "" {
					p.ImportComment = com
					firstCommentFile = name
				} else if p.ImportComment != com {
					badFile(fmt.Errorf("found import comments %q (%s) and %q (%s) in %s", p.ImportComment, firstCommentFile, com, name, p.Dir))
				}
			}
		}

		// Record imports and information about cgo.
		isCgo := false
		for _, decl := range pf.Decls {
			d, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}
			for _, dspec := range d.Specs {
				spec, ok := dspec.(*ast.ImportSpec)
				if !ok {
					continue
				}
				quoted := spec.Path.Value
				path, err := strconv.Unquote(quoted)
				if err != nil {
					log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted)
				}
				if isXTest {
					xTestImported[path] = append(xTestImported[path], fset.Position(spec.Pos()))
				} else if isTest {
					testImported[path] = append(testImported[path], fset.Position(spec.Pos()))
				} else {
					imported[path] = append(imported[path], fset.Position(spec.Pos()))
				}
				if path == "C" {
					if isTest {
						badFile(fmt.Errorf("use of cgo in test %s not supported", filename))
					} else {
						cg := spec.Doc
						if cg == nil && len(d.Specs) == 1 {
							cg = d.Doc
						}
						if cg != nil {
							if err := ctxt.saveCgo(filename, p, cg); err != nil {
								badFile(err)
							}
						}
						isCgo = true
					}
				}
			}
		}
		if isCgo {
			allTags["cgo"] = true
			if ctxt.CgoEnabled {
				p.CgoFiles = append(p.CgoFiles, name)
			} else {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
		} else if isXTest {
			p.XTestGoFiles = append(p.XTestGoFiles, name)
		} else if isTest {
			p.TestGoFiles = append(p.TestGoFiles, name)
		} else {
			p.GoFiles = append(p.GoFiles, name)
		}
	}
	if badGoError != nil {
		return p, badGoError
	}
	if len(p.GoFiles)+len(p.CgoFiles)+len(p.TestGoFiles)+len(p.XTestGoFiles) == 0 {
		return p, &NoGoError{p.Dir}
	}

	for tag := range allTags {
		p.AllTags = append(p.AllTags, tag)
	}
	sort.Strings(p.AllTags)

	p.Imports, p.ImportPos = cleanImports(imported)
	p.TestImports, p.TestImportPos = cleanImports(testImported)
	p.XTestImports, p.XTestImportPos = cleanImports(xTestImported)

	// add the .S files only if we are using cgo
	// (which means gcc will compile them).
	// The standard assemblers expect .s files.
	if len(p.CgoFiles) > 0 {
		p.SFiles = append(p.SFiles, Sfiles...)
		sort.Strings(p.SFiles)
	}

	return p, pkgerr
}
Example #16
// Import returns details about the Go package named by the import path,
// interpreting local import paths relative to the srcDir directory.
// If the path is a local import path naming a package that can be imported
// using a standard import path, the returned package will set p.ImportPath
// to that path.
//
// In the directory containing the package, .go, .c, .h, and .s files are
// considered part of the package except for:
//
//	- .go files in package documentation
//	- files starting with _ or . (likely editor temporary files)
//	- files with build constraints not satisfied by the context
//
// If an error occurs, Import returns a non-nil error and a non-nil
// *Package containing partial information.
//
func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
	p := &Package{
		ImportPath: path,
	}
	if path == "" {
		return p, fmt.Errorf("import %q: invalid import path", path)
	}

	var pkga string
	var pkgerr error
	switch ctxt.Compiler {
	case "gccgo":
		dir, elem := pathpkg.Split(p.ImportPath)
		pkga = "pkg/gccgo/" + dir + "lib" + elem + ".a"
	case "gc":
		tag := ""
		if ctxt.InstallTag != "" {
			tag = "_" + ctxt.InstallTag
		}
		pkga = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + tag + "/" + p.ImportPath + ".a"
	default:
		// Save error for end of function.
		pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
	}

	binaryOnly := false
	if IsLocalImport(path) {
		pkga = "" // local imports have no installed path
		if srcDir == "" {
			return p, fmt.Errorf("import %q: import relative to unknown directory", path)
		}
		if !ctxt.isAbsPath(path) {
			p.Dir = ctxt.joinPath(srcDir, path)
		}
		// Determine canonical import path, if any.
		if ctxt.GOROOT != "" {
			root := ctxt.joinPath(ctxt.GOROOT, "src", "pkg")
			if sub, ok := ctxt.hasSubdir(root, p.Dir); ok {
				p.Goroot = true
				p.ImportPath = sub
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		all := ctxt.gopath()
		for i, root := range all {
			rootsrc := ctxt.joinPath(root, "src")
			if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok {
				// We found a potential import path for dir,
				// but check that using it wouldn't find something
				// else first.
				if ctxt.GOROOT != "" {
					if dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", sub); ctxt.isDir(dir) {
						goto Found
					}
				}
				for _, earlyRoot := range all[:i] {
					if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
						goto Found
					}
				}

				// sub would not name some other directory instead of this one.
				// Record it.
				p.ImportPath = sub
				p.Root = root
				goto Found
			}
		}
		// It's okay that we didn't find a root containing dir.
		// Keep going with the information we have.
	} else {
		if strings.HasPrefix(path, "/") {
			return p, fmt.Errorf("import %q: cannot import absolute path", path)
		}

		// tried records the location of unsuccessful package lookups
		var tried struct {
			goroot string
			gopath []string
		}

		// Determine directory from import path.
		if ctxt.GOROOT != "" {
			dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Goroot = true
				p.Root = ctxt.GOROOT
				goto Found
			}
			tried.goroot = dir
		}
		for _, root := range ctxt.gopath() {
			dir := ctxt.joinPath(root, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Root = root
				goto Found
			}
			tried.gopath = append(tried.gopath, dir)
		}

		// package was not found
		var paths []string
		if tried.goroot != "" {
			paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot))
		} else {
			paths = append(paths, "\t($GOROOT not set)")
		}
		var i int
		var format = "\t%s (from $GOPATH)"
		for ; i < len(tried.gopath); i++ {
			if i > 0 {
				format = "\t%s"
			}
			paths = append(paths, fmt.Sprintf(format, tried.gopath[i]))
		}
		if i == 0 {
			paths = append(paths, "\t($GOPATH not set)")
		}
		return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n"))
	}

Found:
	if p.Root != "" {
		if p.Goroot {
			p.SrcRoot = ctxt.joinPath(p.Root, "src", "pkg")
		} else {
			p.SrcRoot = ctxt.joinPath(p.Root, "src")
		}
		p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
		p.BinDir = ctxt.joinPath(p.Root, "bin")
		if pkga != "" {
			p.PkgObj = ctxt.joinPath(p.Root, pkga)
		}
	}

	if mode&FindOnly != 0 {
		return p, pkgerr
	}
	if binaryOnly && (mode&AllowBinary) != 0 {
		return p, pkgerr
	}

	dirs, err := ctxt.readDir(p.Dir)
	if err != nil {
		return p, err
	}

	var Sfiles []string // files with ".S" (capital S)
	var firstFile string
	imported := make(map[string][]token.Position)
	testImported := make(map[string][]token.Position)
	xTestImported := make(map[string][]token.Position)
	fset := token.NewFileSet()
	for _, d := range dirs {
		if d.IsDir() {
			continue
		}
		name := d.Name()
		if strings.HasPrefix(name, "_") ||
			strings.HasPrefix(name, ".") {
			continue
		}

		i := strings.LastIndex(name, ".")
		if i < 0 {
			i = len(name)
		}
		ext := name[i:]

		if !ctxt.UseAllFiles && !ctxt.goodOSArchFile(name) {
			if ext == ".go" {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
			continue
		}

		switch ext {
		case ".go", ".c", ".s", ".h", ".S", ".swig", ".swigcxx":
			// tentatively okay - read to make sure
		case ".syso":
			// binary objects to add to package archive
			// Likely of the form foo_windows.syso, but
			// the name was vetted above with goodOSArchFile.
			p.SysoFiles = append(p.SysoFiles, name)
			continue
		default:
			// skip
			continue
		}

		filename := ctxt.joinPath(p.Dir, name)
		f, err := ctxt.openFile(filename)
		if err != nil {
			return p, err
		}

		var data []byte
		if strings.HasSuffix(filename, ".go") {
			data, err = readImports(f, false)
		} else {
			data, err = readComments(f)
		}
		f.Close()
		if err != nil {
			return p, fmt.Errorf("read %s: %v", filename, err)
		}

		// Look for +build comments to accept or reject the file.
		if !ctxt.UseAllFiles && !ctxt.shouldBuild(data) {
			if ext == ".go" {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
			continue
		}

		// Going to save the file.  For non-Go files, can stop here.
		switch ext {
		case ".c":
			p.CFiles = append(p.CFiles, name)
			continue
		case ".h":
			p.HFiles = append(p.HFiles, name)
			continue
		case ".s":
			p.SFiles = append(p.SFiles, name)
			continue
		case ".S":
			Sfiles = append(Sfiles, name)
			continue
		case ".swig":
			p.SwigFiles = append(p.SwigFiles, name)
			continue
		case ".swigcxx":
			p.SwigCXXFiles = append(p.SwigCXXFiles, name)
			continue
		}

		pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
		if err != nil {
			return p, err
		}

		pkg := pf.Name.Name
		if pkg == "documentation" {
			p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			continue
		}

		isTest := strings.HasSuffix(name, "_test.go")
		isXTest := false
		if isTest && strings.HasSuffix(pkg, "_test") {
			isXTest = true
			pkg = pkg[:len(pkg)-len("_test")]
		}

		if p.Name == "" {
			p.Name = pkg
			firstFile = name
		} else if pkg != p.Name {
			return p, fmt.Errorf("found packages %s (%s) and %s (%s) in %s", p.Name, firstFile, pkg, name, p.Dir)
		}
		if pf.Doc != nil && p.Doc == "" {
			p.Doc = doc.Synopsis(pf.Doc.Text())
		}

		// Record imports and information about cgo.
		isCgo := false
		for _, decl := range pf.Decls {
			d, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}
			for _, dspec := range d.Specs {
				spec, ok := dspec.(*ast.ImportSpec)
				if !ok {
					continue
				}
				quoted := spec.Path.Value
				path, err := strconv.Unquote(quoted)
				if err != nil {
					log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted)
				}
				if isXTest {
					xTestImported[path] = append(xTestImported[path], fset.Position(spec.Pos()))
				} else if isTest {
					testImported[path] = append(testImported[path], fset.Position(spec.Pos()))
				} else {
					imported[path] = append(imported[path], fset.Position(spec.Pos()))
				}
				if path == "C" {
					if isTest {
						return p, fmt.Errorf("use of cgo in test %s not supported", filename)
					}
					cg := spec.Doc
					if cg == nil && len(d.Specs) == 1 {
						cg = d.Doc
					}
					if cg != nil {
						if err := ctxt.saveCgo(filename, p, cg); err != nil {
							return p, err
						}
					}
					isCgo = true
				}
			}
		}
		if isCgo {
			if ctxt.CgoEnabled {
				p.CgoFiles = append(p.CgoFiles, name)
			}
		} else if isXTest {
			p.XTestGoFiles = append(p.XTestGoFiles, name)
		} else if isTest {
			p.TestGoFiles = append(p.TestGoFiles, name)
		} else {
			p.GoFiles = append(p.GoFiles, name)
		}
	}
	if p.Name == "" {
		return p, &NoGoError{p.Dir}
	}

	p.Imports, p.ImportPos = cleanImports(imported)
	p.TestImports, p.TestImportPos = cleanImports(testImported)
	p.XTestImports, p.XTestImportPos = cleanImports(xTestImported)

	// add the .S files only if we are using cgo
	// (which means gcc will compile them).
	// The standard assemblers expect .s files.
	if len(p.CgoFiles) > 0 {
		p.SFiles = append(p.SFiles, Sfiles...)
		sort.Strings(p.SFiles)
	}

	return p, pkgerr
}
Example #17
func scanPackage(ctxt *build.Context, t *build.Tree, arg, importPath, dir string, stk *importStack, useAllFiles bool) *Package {
	// Read the files in the directory to learn the structure
	// of the package.
	p := &Package{
		ImportPath: importPath,
		Dir:        dir,
		Standard:   t.Goroot && !strings.Contains(importPath, "."),
		t:          t,
	}
	packageCache[dir] = p
	packageCache[importPath] = p

	ctxt.UseAllFiles = useAllFiles
	info, err := ctxt.ScanDir(dir)
	useAllFiles = false // flag does not apply to dependencies
	if err != nil {
		p.Error = &PackageError{
			ImportStack: stk.copy(),
			Err:         err.Error(),
		}
		// Look for parser errors.
		if err, ok := err.(scanner.ErrorList); ok {
			// Prepare error with \n before each message.
			// When printed in something like context: %v
			// this will put the leading file positions each on
			// its own line.  It will also show all the errors
			// instead of just the first, as err.Error does.
			var buf bytes.Buffer
			for _, e := range err {
				buf.WriteString("\n")
				buf.WriteString(e.Error())
			}
			p.Error.Err = buf.String()
		}
		p.Incomplete = true
		return p
	}

	p.info = info
	p.Name = info.Package
	p.Doc = doc.Synopsis(info.PackageComment.Text())
	p.Imports = info.Imports
	p.GoFiles = info.GoFiles
	p.TestGoFiles = info.TestGoFiles
	p.XTestGoFiles = info.XTestGoFiles
	p.CFiles = info.CFiles
	p.HFiles = info.HFiles
	p.SFiles = info.SFiles
	p.CgoFiles = info.CgoFiles
	p.CgoCFLAGS = info.CgoCFLAGS
	p.CgoLDFLAGS = info.CgoLDFLAGS

	if info.Package == "main" {
		_, elem := filepath.Split(importPath)
		full := ctxt.GOOS + "_" + ctxt.GOARCH + "/" + elem
		if t.Goroot && isGoTool[p.ImportPath] {
			p.target = filepath.Join(t.Path, "pkg/tool", full)
		} else {
			if ctxt.GOOS != toolGOOS || ctxt.GOARCH != toolGOARCH {
				// Install cross-compiled binaries to subdirectories of bin.
				elem = full
			}
			p.target = filepath.Join(t.BinDir(), elem)
		}
		if ctxt.GOOS == "windows" {
			p.target += ".exe"
		}
	} else {
		dir := t.PkgDir()
		// For gccgo, rewrite p.target with the expected library name.
		if _, ok := buildToolchain.(gccgoToolchain); ok {
			dir = filepath.Join(filepath.Dir(dir), "gccgo", filepath.Base(dir))
		}
		p.target = buildToolchain.pkgpath(dir, p)
	}

	var built time.Time
	if fi, err := os.Stat(p.target); err == nil {
		built = fi.ModTime()
	}

	// Build list of full paths to all Go files in the package,
	// for use by commands like go fmt.
	for _, f := range info.GoFiles {
		p.gofiles = append(p.gofiles, filepath.Join(dir, f))
	}
	for _, f := range info.CgoFiles {
		p.gofiles = append(p.gofiles, filepath.Join(dir, f))
	}
	for _, f := range info.TestGoFiles {
		p.gofiles = append(p.gofiles, filepath.Join(dir, f))
	}
	for _, f := range info.XTestGoFiles {
		p.gofiles = append(p.gofiles, filepath.Join(dir, f))
	}

	sort.Strings(p.gofiles)

	srcss := [][]string{
		p.GoFiles,
		p.CFiles,
		p.HFiles,
		p.SFiles,
		p.CgoFiles,
	}
Stale:
	for _, srcs := range srcss {
		for _, src := range srcs {
			if fi, err := os.Stat(filepath.Join(p.Dir, src)); err != nil || fi.ModTime().After(built) {
				//println("STALE", p.ImportPath, "needs", src, err)
				p.Stale = true
				break Stale
			}
		}
	}

	importPaths := p.Imports
	// Packages that use cgo import runtime/cgo implicitly,
	// except runtime/cgo itself.
	if len(info.CgoFiles) > 0 && (!p.Standard || p.ImportPath != "runtime/cgo") {
		importPaths = append(importPaths, "runtime/cgo")
	}
	// Everything depends on runtime, except runtime and unsafe.
	if !p.Standard || (p.ImportPath != "runtime" && p.ImportPath != "unsafe") {
		importPaths = append(importPaths, "runtime")
	}

	// Record package under both import path and full directory name.
	packageCache[dir] = p
	packageCache[importPath] = p

	// Build list of imported packages and full dependency list.
	imports := make([]*Package, 0, len(p.Imports))
	deps := make(map[string]bool)
	for _, path := range importPaths {
		if path == "C" {
			continue
		}
		deps[path] = true
		p1 := loadPackage(path, stk)
		if p1.Error != nil {
			if info.ImportPos != nil && len(info.ImportPos[path]) > 0 {
				pos := info.ImportPos[path][0]
				p1.Error.Pos = pos.String()
			}
		}
		imports = append(imports, p1)
		for _, dep := range p1.Deps {
			deps[dep] = true
		}
		if p1.Stale {
			p.Stale = true
		}
		if p1.Incomplete {
			p.Incomplete = true
		}
		// p1.target can be empty only if p1 is not a real package,
		// such as package unsafe or the temporary packages
		// created during go test.
		if !p.Stale && p1.target != "" {
			if fi, err := os.Stat(p1.target); err != nil || fi.ModTime().After(built) {
				//println("STALE", p.ImportPath, "needs", p1.target, err)
				//println("BUILT", built.String(), "VS", fi.ModTime().String())
				p.Stale = true
			}
		}
	}
	p.imports = imports

	p.Deps = make([]string, 0, len(deps))
	for dep := range deps {
		p.Deps = append(p.Deps, dep)
	}
	sort.Strings(p.Deps)
	for _, dep := range p.Deps {
		p1 := packageCache[dep]
		if p1 == nil {
			panic("impossible: missing entry in package cache for " + dep + " imported by " + p.ImportPath)
		}
		p.deps = append(p.deps, p1)
		if p1.Error != nil {
			p.DepsErrors = append(p.DepsErrors, p1.Error)
		}
	}

	// unsafe is a fake package and is never out-of-date.
	if p.Standard && p.ImportPath == "unsafe" {
		p.Stale = false
		p.target = ""
	}

	p.Target = p.target

	return p
}