Example #1
func RewriteGeneratedGogoProtobufFile(name string, extractFn ExtractFunc, optionalFn OptionalFunc, header []byte) error {
	return rewriteFile(name, header, func(fset *token.FileSet, file *ast.File) error {
		cmap := ast.NewCommentMap(fset, file, file.Comments)

		// transform methods that point to optional maps or slices
		for _, d := range file.Decls {
			rewriteOptionalMethods(d, optionalFn)
		}

		// remove types that are already declared
		decls := []ast.Decl{}
		for _, d := range file.Decls {
			if dropExistingTypeDeclarations(d, extractFn) {
				continue
			}
			if dropEmptyImportDeclarations(d) {
				continue
			}
			decls = append(decls, d)
		}
		file.Decls = decls

		// remove unmapped comments
		file.Comments = cmap.Filter(file).Comments()
		return nil
	})
}
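The helpers rewriteOptionalMethods, dropExistingTypeDeclarations, and dropEmptyImportDeclarations belong to the surrounding generator and are not shown here. A minimal sketch of what a predicate like dropEmptyImportDeclarations could look like, inferred only from its call site (the real helper may do more):

func dropEmptyImportDeclarationsSketch(d ast.Decl) bool {
	// Report whether d is an import declaration with no remaining specs,
	// so the caller can drop it from file.Decls.
	gd, ok := d.(*ast.GenDecl)
	return ok && gd.Tok == token.IMPORT && len(gd.Specs) == 0
}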
Example #2
// ParseFile creates a Build from the given file path. The
// FileSet of the resulting Build contains only that single file.
func ParseFile(path string) (*Build, error) {
	var fileSet token.FileSet

	astTree, err := parser.ParseFile(&fileSet, path, nil, parser.AllErrors|parser.ParseComments)
	if err != nil {
		return nil, err
	}

	fileName := filepath.Base(path)

	// create a comment map from file
	commentMap := ast.NewCommentMap(&fileSet, astTree, astTree.Comments)

	// create new build for the file
	build := NewBuild()
	fileAST, err := ParseFileAST(fileName, astTree, commentMap)
	if err != nil {
		return nil, err
	}

	// add parsed file to the build file set
	build.AddFile(fileName, fileAST)

	return build, nil
}
Example #3
// rewriteFile applies the rewrite rule 'pattern -> replace' to an entire file.
func rewriteFile(pattern, replace ast.Expr, p *ast.File) *ast.File {
	cmap := ast.NewCommentMap(fileSet, p, p.Comments)
	m := make(map[string]reflect.Value)
	pat := reflect.ValueOf(pattern)
	repl := reflect.ValueOf(replace)

	var rewriteVal func(val reflect.Value) reflect.Value
	rewriteVal = func(val reflect.Value) reflect.Value {
		// don't bother if val is invalid to start with
		if !val.IsValid() {
			return reflect.Value{}
		}
		for k := range m {
			delete(m, k)
		}
		val = apply(rewriteVal, val)
		if match(m, pat, val) {
			val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos()))
		}
		return val
	}

	r := apply(rewriteVal, reflect.ValueOf(p)).Interface().(*ast.File)
	r.Comments = cmap.Filter(r).Comments() // recreate comments list
	return r
}
Example #4
func processFile(filename string) []Function {
	var res []Function
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	cmap := ast.NewCommentMap(fset, f, f.Comments)
	ast.Inspect(f, func(n ast.Node) bool {
		switch x := n.(type) {
		case *ast.FuncDecl:
			fun := Function{Begin: fset.Position(x.Pos()).Line,
				Package:  f.Name.String(),
				Name:     x.Name.String(),
				End:      fset.Position(x.End()).Line,
				Filepath: fset.Position(x.Pos()).Filename}

			fun.getTSpec(cmap[n])
			res = append(res, fun)
		}
		return true
	})
	return res
}
Example #5
func (gas *Analyzer) process(filename string, source interface{}) error {
	mode := parser.ParseComments
	root, err := parser.ParseFile(gas.context.FileSet, filename, source, mode)
	if err == nil {
		gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, root, root.Comments)
		gas.context.Root = root

		// here we get type info
		gas.context.Info = &types.Info{
			Types:      make(map[ast.Expr]types.TypeAndValue),
			Defs:       make(map[*ast.Ident]types.Object),
			Uses:       make(map[*ast.Ident]types.Object),
			Selections: make(map[*ast.SelectorExpr]*types.Selection),
			Scopes:     make(map[ast.Node]*types.Scope),
			Implicits:  make(map[ast.Node]types.Object),
		}

		conf := types.Config{Importer: importer.Default()}
		gas.context.Pkg, err = conf.Check("pkg", gas.context.FileSet, []*ast.File{root}, gas.context.Info)
		if err != nil {
			gas.logger.Println("failed to check imports")
			return err
		}

		ast.Walk(gas, root)
		gas.Stats.NumFiles++
	}
	return err
}
Example #6
// packageExports is a local implementation of ast.PackageExports
// which correctly updates each package file's comment list.
// (The ast.PackageExports signature is frozen, hence the local
// implementation).
//
func packageExports(fset *token.FileSet, pkg *ast.Package) {
	for _, src := range pkg.Files {
		cmap := ast.NewCommentMap(fset, src, src.Comments)
		ast.FileExports(src)
		src.Comments = cmap.Filter(src).Comments()
	}
}
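A hedged usage sketch for packageExports (the directory path and printing step are assumptions for illustration): parse a package with comments enabled, trim it to its exported API, and print each file. Because the comment map is built before ast.FileExports and filtered afterwards, comments attached to removed unexported declarations are dropped instead of dangling.

// printExportedAPI is a hypothetical caller of the packageExports helper above.
func printExportedAPI(fset *token.FileSet, dir string) error {
	pkgs, err := parser.ParseDir(fset, dir, nil, parser.ParseComments)
	if err != nil {
		return err
	}
	for _, pkg := range pkgs {
		packageExports(fset, pkg)
		for _, f := range pkg.Files {
			// Only comments attached to surviving declarations remain.
			if err := format.Node(os.Stdout, fset, f); err != nil {
				return err
			}
		}
	}
	return nil
}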
Example #7
// filterInfo updates info to include only the nodes that match the given
// filter args.
func filterInfo(args []string, info *PageInfo) {
	rx, err := makeRx(args)
	if err != nil {
		log.Fatalf("illegal regular expression from %v: %v", args, err)
	}

	filter := func(s string) bool { return rx.MatchString(s) }
	switch {
	case info.PAst != nil:
		newPAst := map[string]*ast.File{}
		for name, a := range info.PAst {
			cmap := ast.NewCommentMap(info.FSet, a, a.Comments)
			a.Comments = []*ast.CommentGroup{} // remove all comments.
			ast.FilterFile(a, filter)
			if len(a.Decls) > 0 {
				newPAst[name] = a
			}
			for _, d := range a.Decls {
				// add back the comments associated with d only
				comments := cmap.Filter(d).Comments()
				a.Comments = append(a.Comments, comments...)
			}
		}
		info.PAst = newPAst // add only matching files.
	case info.PDoc != nil:
		info.PDoc.Filter(filter)
	}
}
Example #8
// ParseDir will create a Build from the directory that
// was passed into the function.
func ParseDir(path string) (*Build, error) {
	var fileSet token.FileSet

	packages, err := parser.ParseDir(&fileSet, path, nil, parser.AllErrors|parser.ParseComments)
	if err != nil {
		return nil, err
	}

	// create new build for the file set
	build := NewBuild()

	// iterate over all packages in the directory
	for _, pkg := range packages {
		// iterate over all files within the package
		for name, astTree := range pkg.Files {
			baseName := filepath.Base(name)

			// create a comment map from file
			commentMap := ast.NewCommentMap(&fileSet, astTree, astTree.Comments)

			fileAST, err := ParseFileAST(baseName, astTree, commentMap)
			if err != nil {
				return nil, err
			}
			build.AddFile(baseName, fileAST)
		}
	}

	return build, nil
}
Example #9
func getCommentMap(fileName string) (cm map[string]string) {
	fileSet := token.NewFileSet() // positions are relative to fset

	// Parse the named file, including its comments.
	f, err := parser.ParseFile(fileSet, fileName, nil, parser.ParseComments)
	if err != nil {
		fmt.Println(err)
		return
	}

	commentMap := ast.NewCommentMap(fileSet, f, f.Comments)
	cm = make(map[string]string)
	for n, cgs := range commentMap {
		fmt.Printf("%#v,%#v --- %#v\n", n.(ast.Node).Pos(), n.(ast.Node).End(), toText(cgs))
		comment := toText(cgs)
		if len(strings.TrimSpace(comment)) == 0 {
			continue
		}
		split := strings.SplitN(comment, " ", 2)
		if len(split) < 2 {
			// No space after the key; skip comments we cannot split.
			continue
		}
		key := split[0]
		godoc := split[1]
		cm[key] = godoc
	}
	return cm
}
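toText is not shown above; a minimal sketch, assuming it simply flattens a node's comment groups into one string:

func toText(cgs []*ast.CommentGroup) string {
	// Join the text of every comment group attached to the node.
	var parts []string
	for _, cg := range cgs {
		parts = append(parts, strings.TrimSpace(cg.Text()))
	}
	return strings.Join(parts, " ")
}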
Example #10
// This example illustrates how to remove a variable declaration
// in a Go program while maintaining correct comment association
// using an ast.CommentMap.
func ExampleCommentMap() {
	// src is the input for which we create the AST that we
	// are going to manipulate.
	src := `
// This is the package comment.
package main

// This comment is associated with the hello constant.
const hello = "Hello, World!" // line comment 1

// This comment is associated with the foo variable.
var foo = hello // line comment 2 

// This comment is associated with the main function.
func main() {
	fmt.Println(hello) // line comment 3
}
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "src.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Create an ast.CommentMap from the ast.File's comments.
	// This helps keeping the association between comments
	// and AST nodes.
	cmap := ast.NewCommentMap(fset, f, f.Comments)

	// Remove the first variable declaration from the list of declarations.
	f.Decls = removeFirstVarDecl(f.Decls)

	// Use the comment map to filter comments that don't belong anymore
	// (the comments associated with the variable declaration), and create
	// the new comments list.
	f.Comments = cmap.Filter(f).Comments()

	// Print the modified AST.
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, f); err != nil {
		panic(err)
	}
	fmt.Printf("%s", buf.Bytes())

	// output:
	// // This is the package comment.
	// package main
	//
	// // This comment is associated with the hello constant.
	// const hello = "Hello, World!" // line comment 1
	//
	// // This comment is associated with the main function.
	// func main() {
	// 	fmt.Println(hello) // line comment 3
	// }
}
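removeFirstVarDecl is defined elsewhere in the runnable example; a minimal sketch consistent with its use above (splice out the first top-level var declaration and leave everything else intact):

func removeFirstVarDecl(decls []ast.Decl) []ast.Decl {
	for i, d := range decls {
		// The first GenDecl with token VAR is the foo declaration in src.
		if gd, ok := d.(*ast.GenDecl); ok && gd.Tok == token.VAR {
			return append(decls[:i], decls[i+1:]...)
		}
	}
	return decls
}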
Example #11
func functions(f *ast.File, info types.Info, fset *token.FileSet) ([]Function, error) {
	fns := exportedFuncs(f, fset)
	fns = errorOrVoid(fns, info)

	cmtMap := ast.NewCommentMap(fset, f, f.Comments)

	functions := make([]Function, len(fns))

	for i, fn := range fns {
		fun := Function{Name: fn.Name.Name}
		fun.Comment = combine(cmtMap[fn])

		// We only support nil returns or error returns, so if there is a
		// return value, it's an error. Guard against a nil Results list
		// for functions that return nothing.
		if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 {
			fun.IsError = true
		}
		params := fn.Type.Params.List
		fun.Params = make([]Param, 0, len(params))
		for _, field := range params {
			t := info.TypeOf(field.Type)
			pointer := false
			if p, ok := t.(*types.Pointer); ok {
				t = p.Elem()
				pointer = true
			}
			if b, ok := t.(*types.Basic); ok {
				if b.Kind() == types.UnsafePointer {
					log.Printf(
						"Can't create command for function %q because its parameter %q is an unsafe.Pointer.",
						fn.Name.Name,
						field.Names[0])
					break
				}

				fieldCmt := combine(cmtMap[field])
				// handle a, b, c int
				for _, name := range field.Names {
					nameCmt := combine(cmtMap[name])
					if nameCmt == "" {
						nameCmt = fieldCmt
					}
					param := Param{
						Name:      name.Name,
						Type:      b.Kind(),
						IsPointer: pointer,
						Comment:   nameCmt,
					}
					fun.Params = append(fun.Params, param)
				}
				continue
			}
		}
		functions[i] = fun
	}
	return functions, nil
}
Example #12
// Trim trims the AST rooted at node based on the coverage profile,
// removing irrelevant and unreached parts of the program.
// If the node is an *ast.File, comments are updated as well using
// an ast.CommentMap.
func (p *Profile) Trim(node ast.Node) {
	if f, ok := node.(*ast.File); ok {
		cmap := ast.NewCommentMap(p.Fset, f, f.Comments)
		ast.Walk(&trimVisitor{p}, f)
		f.Comments = cmap.Filter(f).Comments()
	} else {
		ast.Walk(&trimVisitor{p}, node)
	}
}
Example #13
// Inline replaces each instance of identifier k with v.Ident in ast.File f,
// for k, v := range m.
// For all inlines that were triggered, it also adds imports from v.Imports to f.
// In addition, it removes top level type declarations of the form
// type k ...
// for all k in m.
//
// Every k in m should be a valid identifier.
// Every v.Ident should be a valid expression.
func Inline(fset *token.FileSet, f *ast.File, m map[string]Target) error {
	// Build the inline map.
	im := map[string]reflect.Value{}
	for k, v := range m {
		expr, err := parser.ParseExpr(k)
		if err != nil {
			return fmt.Errorf("failed to parse `%s`: %s", k, err)
		}
		if _, ok := expr.(*ast.Ident); !ok {
			return fmt.Errorf("expected identifier, got %s which is %T", k, expr)
		}
		expr, err = parser.ParseExpr(v.Ident)
		if err != nil {
			return fmt.Errorf("failed to parse `%s`: %s", v.Ident, err)
		}
		s := v.Ident
		if _, ok := expr.(*ast.StarExpr); ok {
			s = fmt.Sprintf("(%s)", s)
		}
		im[k] = reflect.ValueOf(ast.Ident{Name: s})
	}
	// Filter `type XXX ...` declarations out if we are inlining XXX.
	cmap := ast.NewCommentMap(fset, f, f.Comments)
	to := 0
	for _, d := range f.Decls {
		skip := false
		if t, ok := d.(*ast.GenDecl); ok {
			for _, s := range t.Specs {
				ts, ok := s.(*ast.TypeSpec)
				if !ok {
					continue
				}
				if _, ok = im[ts.Name.String()]; ok {
					skip = true
				}
			}
		}
		if !skip {
			f.Decls[to] = d
			to++
		}
	}
	if to != len(f.Decls) {
		f.Decls = f.Decls[:to]
		// Remove comments for the declarations that were filtered out.
		f.Comments = cmap.Filter(f).Comments()
	}
	// Add imports for the inlines that were triggered.
	for k := range inline(im, f) {
		for _, imp := range m[k].Imports {
			astutil.AddImport(fset, f, imp)
		}
	}
	return nil
}
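A hedged usage sketch for Inline (the file name, identifier, and import path are illustrative assumptions, and any other Target fields are left at their zero values): replace every occurrence of the identifier Key with mypkg.ConcreteKey and pull in the needed import.

// inlineKeySketch shows a hypothetical call to Inline.
func inlineKeySketch() error {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "gen.go", nil, parser.ParseComments)
	if err != nil {
		return err
	}
	targets := map[string]Target{
		"Key": {Ident: "mypkg.ConcreteKey", Imports: []string{"example.com/mypkg"}},
	}
	return Inline(fset, f, targets)
}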
Example #14
func (file *File) initCMap() {
	// Associate comments with AST nodes using go/ast's comment map.
	goCMap := ast.NewCommentMap(file.FSet, file.AST, file.AST.Comments)

	// Additionally index each comment group by the line it ends on,
	// so comments can also be looked up by line number.
	myCMap := make(map[int]cmapel, len(file.AST.Comments))
	for _, comment := range file.AST.Comments {
		end := file.FSet.Position(comment.End())
		myCMap[end.Line] = cmapel{
			Comment: comment,
			End:     end,
		}
	}
	file.cmap = cmap{
		GoCMap: goCMap,
		MyCMap: myCMap,
	}
}
Example #15
func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typesPkg *types.Package, isBlocking func(*types.Func) bool) *Info {
	info := &Info{
		Info:          typesInfo,
		Pkg:           typesPkg,
		HasPointer:    make(map[*types.Var]bool),
		comments:      make(ast.CommentMap),
		IsBlocking:    isBlocking,
		FuncDeclInfos: make(map[*types.Func]*FuncInfo),
		FuncLitInfos:  make(map[*ast.FuncLit]*FuncInfo),
	}
	info.InitFuncInfo = info.newFuncInfo()

	for _, file := range files {
		for k, v := range ast.NewCommentMap(fileSet, file, file.Comments) {
			info.comments[k] = v
		}
		ast.Walk(info.InitFuncInfo, file)
	}

	for {
		done := true
		for _, funcInfo := range info.allInfos {
			for obj, calls := range funcInfo.LocalCalls {
				if len(info.FuncDeclInfos[obj].Blocking) != 0 {
					for _, call := range calls {
						funcInfo.markBlocking(call)
					}
					delete(funcInfo.LocalCalls, obj)
					done = false
				}
			}
		}
		if done {
			break
		}
	}

	for _, funcInfo := range info.allInfos {
		for _, continueStmt := range funcInfo.ContinueStmts {
			if funcInfo.Blocking[continueStmt.forStmt.Post] {
				funcInfo.markBlocking(continueStmt.analyzeStack)
			}
		}
	}

	return info
}
Example #16
func main() {
	src := `
// HelloWorldService
package main

const hello = "Hello World!"

// This is a forbidden phrase!
const evil = "Where is my daily paçoca???"

func main() {
    fmt.Println(hello)
}
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "src.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Change the package comment
	f.Comments[0].List[0].Text = f.Comments[0].List[0].Text + " - CENSORED BY NEOWAY"

	// Create an ast.CommentMap from the ast.File's comments.
	// This helps keeping the association between comments
	// and AST nodes.
	cmap := ast.NewCommentMap(fset, f, f.Comments)

	// Remove the evil constant declaration from the list of declarations.
	f.Decls = append(f.Decls[:1], f.Decls[2:]...)

	// Use the comment map to filter comments that don't belong anymore
	// (the comments associated with the variable declaration), and create
	// the new comments list.
	f.Comments = cmap.Filter(f).Comments()

	// Print the modified AST.
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, f); err != nil {
		panic(err)
	}
	fmt.Printf("%s", buf.Bytes())
}
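Deleting by position (f.Decls[:1] plus f.Decls[2:]) works for this fixed src but breaks silently if the declaration order changes. A sketch of a name-based variant (the helper name is an assumption, not part of the original):

func removeConstByName(decls []ast.Decl, name string) []ast.Decl {
	for i, d := range decls {
		gd, ok := d.(*ast.GenDecl)
		if !ok || gd.Tok != token.CONST {
			continue
		}
		for _, spec := range gd.Specs {
			vs, ok := spec.(*ast.ValueSpec)
			if !ok {
				continue
			}
			for _, n := range vs.Names {
				if n.Name == name {
					// Splice out the whole declaration that defines name.
					return append(decls[:i], decls[i+1:]...)
				}
			}
		}
	}
	return decls
}

With it, the removal above becomes f.Decls = removeConstByName(f.Decls, "evil").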
Example #17
File: astx.go Project: gofmt/astx
func parseFileStructs(fset *token.FileSet, f *ast.File) []Struct {
	parsedStructs := []Struct{}
	commentMap := ast.NewCommentMap(fset, f, f.Comments)

	for _, decl := range f.Decls {
		genDecl, ok := decl.(*ast.GenDecl)
		if !ok {
			continue
		}

		for _, spec := range genDecl.Specs {
			typeSpec, ok := spec.(*ast.TypeSpec)
			if !ok {
				continue
			}
			structType, ok := typeSpec.Type.(*ast.StructType)
			if !ok {
				continue
			}
			structName := typeSpec.Name.Name
			var comments []string
			commentGroups := commentMap[genDecl]
			if commentGroups != nil {
				for _, group := range commentGroups {
					comments = append(comments, parseComments(group)...)
				}
			}
			parsedStruct := parseStruct(fset, structType)
			parsedStruct.Name = structName
			parsedStruct.Comments = comments
			parsedStructs = append(parsedStructs, *parsedStruct)
		}
	}

	if len(parsedStructs) == 0 {
		return nil
	}

	return parsedStructs
}
Example #18
func processFile(filename string, out func(Measurement)) {
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Use a comment map, since the Doc comment on the AST nodes below is not enough.
	cmap := ast.NewCommentMap(fset, f, f.Comments)

	// iterate over function declarations from AST
	ast.Inspect(f, func(n ast.Node) bool {
		switch x := n.(type) {
		case *ast.FuncDecl:
			name := f.Name.String() + "/" + x.Name.String()
			out(Measurement{name + "/begin", strconv.Itoa(fset.Position(x.Pos()).Line)})
			out(Measurement{name + "/end", strconv.Itoa(fset.Position(x.End()).Line)})
			out(Measurement{name + "/filename", fset.Position(x.Pos()).Filename})

			getTSpec(name, cmap[n], out)
		}
		return true
	})
}
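Measurement and getTSpec are defined elsewhere in that project. A minimal Measurement matching the positional literals above, plus a trivial sink usable as the out callback (both are assumptions for illustration):

// Measurement pairs a metric name with its string value.
type Measurement struct {
	Name  string
	Value string
}

// printMeasurement writes one name=value pair per line; pass it as out,
// e.g. processFile("main.go", printMeasurement).
func printMeasurement(m Measurement) {
	fmt.Printf("%s=%s\n", m.Name, m.Value)
}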
Example #19
func filterInfo(pres *Presentation, args []string, info *PageInfo) {
	rx, err := makeRx(args)
	if err != nil {
		log.Fatalf("illegal regular expression from %v: %v", args, err)
	}

	filter := func(s string) bool { return rx.MatchString(s) }
	switch {
	case info.PAst != nil:
		cmap := ast.NewCommentMap(info.FSet, info.PAst, info.PAst.Comments)
		ast.FilterFile(info.PAst, filter)
		// Special case: Don't use templates for printing
		// so we only get the filtered declarations without
		// package clause or extra whitespace.
		for i, d := range info.PAst.Decls {
			// determine the comments associated with d only
			comments := cmap.Filter(d).Comments()
			cn := &printer.CommentedNode{Node: d, Comments: comments}
			if i > 0 {
				fmt.Println()
			}
			if pres.HTMLMode {
				var buf bytes.Buffer
				pres.WriteNode(&buf, info.FSet, cn)
				FormatText(os.Stdout, buf.Bytes(), -1, true, "", nil)
			} else {
				pres.WriteNode(os.Stdout, info.FSet, cn)
			}
			fmt.Println()
		}
		return

	case info.PDoc != nil:
		info.PDoc.Filter(filter)
	}
}
Example #20
func buildMockForInterface(o *options, t *ast.InterfaceType, imports []*ast.ImportSpec) string {
	// TODO: if we're not building this mock in the package it came from then
	// we need to qualify any local types and add an import.
	// We make up a package name that's unlikely to be used

	if o.pkg != nil {
		thisdir, _ := os.Getwd()
		if thisdir != o.pkg.Dir {
			if qualifyLocalTypes(t, "utmocklocal") {
				imports = append(imports, &ast.ImportSpec{
					Name: ast.NewIdent("utmocklocal"),
					Path: &ast.BasicLit{
						Kind:  token.STRING,
						Value: "\"" + o.pkg.ImportPath + "\"",
					},
				})
			}
		}
	}

	// Mock Implementation of the interface
	mockAst, fset, err := buildBasicFile(o.targetPackage, o.mockName)
	if err != nil {
		fmt.Printf("Failed to parse basic AST. %v", err)
		os.Exit(2)
	}

	// Build a map to keep track of where the comments are
	cmap := ast.NewCommentMap(fset, mockAst, mockAst.Comments)

	// Method receiver for our mock interface
	recv := buildMethodReceiver(o.mockName)

	// Add methods to our mockAst for each interface method
	for _, m := range t.Methods.List {
		t, ok := m.Type.(*ast.FuncType)
		if ok {
			// Names for return values cause problems, so remove them.
			if t.Results != nil {
				removeFieldNames(t.Results)
			}

			// We can have multiple names for a method type if multiple
			// methods are declared with the same signature
			for _, n := range m.Names {
				fd := buildMockMethod(recv, n.Name, t)

				mockAst.Decls = append(mockAst.Decls, fd)
			}
		}
	}

	addImportsToMock(mockAst, fset, imports)

	// Fixup the comments
	mockAst.Comments = cmap.Filter(mockAst).Comments()

	var buf bytes.Buffer
	format.Node(&buf, fset, mockAst)

	return buf.String()
}
Example #21
File: main.go Project: hfeeki/go
func main() {
	flag.Usage = usage
	flag.Parse()

	// Check usage: either server and no args, command line and args, or index creation mode
	if (*httpAddr != "" || *urlFlag != "") != (flag.NArg() == 0) && !*writeIndex {
		usage()
	}

	if *tabwidth < 0 {
		log.Fatalf("negative tabwidth %d", *tabwidth)
	}

	// Determine file system to use.
	// TODO(gri) - fs and fsHttp should really be the same. Try to unify.
	//           - fsHttp doesn't need to be set up in command-line mode,
	//             same is true for the http handlers in initHandlers.
	if *zipfile == "" {
		// use file system of underlying OS
		fs.Bind("/", OS(*goroot), "/", bindReplace)
		if *templateDir != "" {
			fs.Bind("/lib/godoc", OS(*templateDir), "/", bindBefore)
		}
	} else {
		// use file system specified via .zip file (path separator must be '/')
		rc, err := zip.OpenReader(*zipfile)
		if err != nil {
			log.Fatalf("%s: %s\n", *zipfile, err)
		}
		defer rc.Close() // be nice (e.g., -writeIndex mode)
		fs.Bind("/", NewZipFS(rc, *zipfile), *goroot, bindReplace)
	}

	// Bind $GOPATH trees into Go root.
	for _, p := range filepath.SplitList(build.Default.GOPATH) {
		fs.Bind("/src/pkg", OS(p), "/src", bindAfter)
	}

	readTemplates()
	initHandlers()

	if *writeIndex {
		// Write search index and exit.
		if *indexFiles == "" {
			log.Fatal("no index file specified")
		}

		log.Println("initialize file systems")
		*verbose = true // want to see what happens
		initFSTree()

		*indexThrottle = 1
		updateIndex()

		log.Println("writing index file", *indexFiles)
		f, err := os.Create(*indexFiles)
		if err != nil {
			log.Fatal(err)
		}
		index, _ := searchIndex.get()
		err = index.(*Index).Write(f)
		if err != nil {
			log.Fatal(err)
		}

		log.Println("done")
		return
	}

	// Print content that would be served at the URL *urlFlag.
	if *urlFlag != "" {
		registerPublicHandlers(http.DefaultServeMux)
		// Try up to 10 fetches, following redirects.
		urlstr := *urlFlag
		for i := 0; i < 10; i++ {
			// Prepare request.
			u, err := url.Parse(urlstr)
			if err != nil {
				log.Fatal(err)
			}
			req := &http.Request{
				URL: u,
			}

			// Invoke default HTTP handler to serve request
			// to our buffering httpWriter.
			w := &httpWriter{h: http.Header{}, code: 200}
			http.DefaultServeMux.ServeHTTP(w, req)

			// Return data, error, or follow redirect.
			switch w.code {
			case 200: // ok
				os.Stdout.Write(w.Bytes())
				return
			case 301, 302, 303, 307: // redirect
				redirect := w.h.Get("Location")
				if redirect == "" {
					log.Fatalf("HTTP %d without Location header", w.code)
				}
				urlstr = redirect
			default:
				log.Fatalf("HTTP error %d", w.code)
			}
		}
		log.Fatalf("too many redirects")
	}

	if *httpAddr != "" {
		// HTTP server mode.
		var handler http.Handler = http.DefaultServeMux
		if *verbose {
			log.Printf("Go Documentation Server")
			log.Printf("version = %s", runtime.Version())
			log.Printf("address = %s", *httpAddr)
			log.Printf("goroot = %s", *goroot)
			log.Printf("tabwidth = %d", *tabwidth)
			switch {
			case !*indexEnabled:
				log.Print("search index disabled")
			case *maxResults > 0:
				log.Printf("full text index enabled (maxresults = %d)", *maxResults)
			default:
				log.Print("identifier search index enabled")
			}
			fs.Fprint(os.Stderr)
			handler = loggingHandler(handler)
		}

		registerPublicHandlers(http.DefaultServeMux)
		registerPlaygroundHandlers(http.DefaultServeMux)

		// Initialize default directory tree with corresponding timestamp.
		// (Do it in a goroutine so that launch is quick.)
		go initFSTree()

		// Immediately update metadata.
		updateMetadata()
		// Periodically refresh metadata.
		go refreshMetadataLoop()

		// Initialize search index.
		if *indexEnabled {
			go indexer()
		}

		// Start http server.
		if err := http.ListenAndServe(*httpAddr, handler); err != nil {
			log.Fatalf("ListenAndServe %s: %v", *httpAddr, err)
		}

		return
	}

	// Command line mode.
	if *html {
		packageText = packageHTML
		searchText = packageHTML
	}

	if *query {
		// Command-line queries.
		for i := 0; i < flag.NArg(); i++ {
			res, err := remoteSearch(flag.Arg(i))
			if err != nil {
				log.Fatalf("remoteSearch: %s", err)
			}
			io.Copy(os.Stdout, res.Body)
		}
		return
	}

	// Determine paths.
	//
	// If we are passed an operating system path like . or ./foo or /foo/bar or c:\mysrc,
	// we need to map that path somewhere in the fs name space so that routines
	// like getPageInfo will see it.  We use the arbitrarily-chosen virtual path "/target"
	// for this.  That is, if we get passed a directory like the above, we map that
	// directory so that getPageInfo sees it as /target.
	const target = "/target"
	const cmdPrefix = "cmd/"
	path := flag.Arg(0)
	var forceCmd bool
	var abspath, relpath string
	if filepath.IsAbs(path) {
		fs.Bind(target, OS(path), "/", bindReplace)
		abspath = target
	} else if build.IsLocalImport(path) {
		cwd, _ := os.Getwd() // ignore errors
		path = filepath.Join(cwd, path)
		fs.Bind(target, OS(path), "/", bindReplace)
		abspath = target
	} else if strings.HasPrefix(path, cmdPrefix) {
		path = path[len(cmdPrefix):]
		forceCmd = true
	} else if bp, _ := build.Import(path, "", build.FindOnly); bp.Dir != "" && bp.ImportPath != "" {
		fs.Bind(target, OS(bp.Dir), "/", bindReplace)
		abspath = target
		relpath = bp.ImportPath
	} else {
		abspath = pathpkg.Join(pkgHandler.fsRoot, path)
	}
	if relpath == "" {
		relpath = abspath
	}

	var mode PageInfoMode
	if relpath == builtinPkgPath {
		// the fake built-in package contains unexported identifiers
		mode = noFiltering
	}
	if *srcMode {
		// only filter exports if we don't have explicit command-line filter arguments
		if flag.NArg() > 1 {
			mode |= noFiltering
		}
		mode |= showSource
	}

	// first, try as package unless forced as command
	var info PageInfo
	if !forceCmd {
		info = pkgHandler.getPageInfo(abspath, relpath, mode)
	}

	// second, try as command unless the path is absolute
	// (the go command invokes godoc w/ absolute paths; don't override)
	var cinfo PageInfo
	if !filepath.IsAbs(path) {
		abspath = pathpkg.Join(cmdHandler.fsRoot, path)
		cinfo = cmdHandler.getPageInfo(abspath, relpath, mode)
	}

	// determine what to use
	if info.IsEmpty() {
		if !cinfo.IsEmpty() {
			// only cinfo exists - switch to cinfo
			info = cinfo
		}
	} else if !cinfo.IsEmpty() {
		// both info and cinfo exist - use cinfo if info
		// contains only subdirectory information
		if info.PAst == nil && info.PDoc == nil {
			info = cinfo
		} else {
			fmt.Printf("use 'godoc %s%s' for documentation on the %s command \n\n", cmdPrefix, relpath, relpath)
		}
	}

	if info.Err != nil {
		log.Fatalf("%v", info.Err)
	}
	if info.PDoc != nil && info.PDoc.ImportPath == target {
		// Replace virtual /target with actual argument from command line.
		info.PDoc.ImportPath = flag.Arg(0)
	}

	// If we have more than one argument, use the remaining arguments for filtering
	if flag.NArg() > 1 {
		args := flag.Args()[1:]
		rx := makeRx(args)
		if rx == nil {
			log.Fatalf("illegal regular expression from %v", args)
		}

		filter := func(s string) bool { return rx.MatchString(s) }
		switch {
		case info.PAst != nil:
			cmap := ast.NewCommentMap(info.FSet, info.PAst, info.PAst.Comments)
			ast.FilterFile(info.PAst, filter)
			// Special case: Don't use templates for printing
			// so we only get the filtered declarations without
			// package clause or extra whitespace.
			for i, d := range info.PAst.Decls {
				// determine the comments associated with d only
				comments := cmap.Filter(d).Comments()
				cn := &printer.CommentedNode{Node: d, Comments: comments}
				if i > 0 {
					fmt.Println()
				}
				if *html {
					var buf bytes.Buffer
					writeNode(&buf, info.FSet, cn)
					FormatText(os.Stdout, buf.Bytes(), -1, true, "", nil)
				} else {
					writeNode(os.Stdout, info.FSet, cn)
				}
				fmt.Println()
			}
			return

		case info.PDoc != nil:
			info.PDoc.Filter(filter)
		}
	}

	if err := packageText.Execute(os.Stdout, info); err != nil {
		log.Printf("packageText.Execute: %s", err)
	}
}
Example #22
func MakeFile(pkg, pkgout, src string, debug bool) (*File, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		return nil, e.New(err)
	}
	v := &Visitor{
		src:         src,
		Struct:      make(map[string]*Strct),
		Cmap:        ast.NewCommentMap(fset, f, f.Comments),
		StructOrder: make([]*Strct, 0),
		Pkgs:        NewPkgs(),
	}
	v.Pkgs.Have("crypto/tls")
	v.Pkgs.Have("fmt")
	v.Pkgs.Have("log")
	v.Pkgs.Have("reflect")
	v.Pkgs.Have("tls")
	v.Pkgs.Have("time")
	v.Pkgs.Have("github.com/fcavani/e")
	v.Pkgs.Have("github.com/fcavani/gormethods/auth")
	v.Pkgs.Have("github.com/fcavani/gormethods/client")
	v.Pkgs.Have("gopkg.in/vmihailenco/msgpack.v2")

	ast.Walk(v, f)

	file := &File{
		Package:     f.Name.Name,
		Debug:       debug,
		PkgComplete: f.Name.Name,
		Pkgs:        v.Pkgs.Packages(),
	}

	if pkg != "" {
		file.PkgComplete = pkg
	}

	if pkgout != "" {
		file.Package = pkgout
	}

	objs := make(map[string]*Object)
	order := make([]string, 0)
	for _, f := range v.Functions {
		u := utf8string.NewString(f.RecvType)
		if u.RuneCount() > 0 && !unicode.IsUpper(u.At(0)) {
			continue
		}
		if f.RecvType == "" {
			continue
		}
		if FuncForbiden(f.Name) {
			continue
		}
		obj, found := objs[f.RecvType]
		if !found {
			function, err := makeFunction(f)
			if err != nil {
				return nil, e.Forward(err)
			}
			t := f.RecvType
			if f.PtrRecv {
				t = "*" + t
			}
			strct, found := v.Struct[f.RecvType]
			if !found {
				return nil, e.New("struct %v not found", f.RecvType)
			}
			objs[f.RecvType] = &Object{
				Name:       f.RecvType,
				NameClient: f.RecvType + "Client",
				RecvVar:    f.Recv,
				RecvType:   t,
				Functions:  []*Function{function},
				Type:       makeType(src, strct.Type),
				Comments:   strings.Replace(strct.Comments, f.RecvType, f.RecvType+"Client", -1),
			}
			v.Struct[f.RecvType].FunctionCount++
			order = append(order, f.RecvType)
		} else {
			function, err := makeFunction(f)
			if err != nil {
				return nil, e.Forward(err)
			}
			obj.Functions = append(obj.Functions, function)
		}
	}

	for _, s := range v.StructOrder {
		if s.FunctionCount == 0 {
			first, err := rand.Chars(1, rand.Letters, "go")
			if err != nil {
				return nil, e.Push(err, "can't create a name to the receiver for the struct")
			}
			recv, err := rand.Chars(2, rand.NumberLetters, "go")
			if err != nil {
				return nil, e.Push(err, "can't create a name to the receiver for the struct")
			}
			recv = strings.ToLower(first + recv)
			objs[s.Name] = &Object{
				Name:       s.Name,
				NameClient: s.Name + "Client",
				RecvVar:    recv,
				RecvType:   s.Name,
				Functions:  nil,
				Type:       makeType(src, s.Type),
				Comments:   strings.Replace(s.Comments, s.Name, s.Name+"Client", -1),
			}
		}
	}

	for _, s := range v.StructOrder {
		if obj, found := objs[s.Name]; found {
			file.Objects = append(file.Objects, obj)
		} else {
			return nil, e.New("object not found: %v", s.Name)
		}
	}

	return file, nil
}
Example #23
func loadFunc(obj types.Object) *funcNode {
	f := newFuncNode(obj, nil)
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, fluxPath(obj), nil, parser.ParseComments)
	if err == nil {
		r := &reader{fset, obj.GetPkg(), types.NewScope(obj.GetPkg().Scope()), map[string]*port{}, map[string][]*connection{}, ast.NewCommentMap(fset, file, file.Comments), map[int]node{}}
		for _, i := range file.Imports {
			path, _ := strconv.Unquote(i.Path.Value)
			pkg, err := getPackage(path)
			if err != nil {
				fmt.Printf("error importing %s: %s\n", i.Path.Value, err)
				continue
			}
			name := pkg.Name
			if i.Name != nil {
				name = i.Name.Name
			}
			r.scope.Insert(types.NewPkgName(0, pkg, name))
		}
		decl := file.Decls[len(file.Decls)-1].(*ast.FuncDecl) // get param and result var names from the source, as the obj names might not match
		if decl.Recv != nil {
			r.out(decl.Recv.List[0].Names[0], f.inputsNode.newOutput(obj.GetType().(*types.Signature).Recv))
		}
		r.fun(f, decl.Type, decl.Body)
	} else {
		// this is a new func; save it
		if isMethod(obj) {
			f.inputsNode.newOutput(obj.GetType().(*types.Signature).Recv)
		}
		saveFunc(f)
	}
	return f
}
Example #24
func (ts *ThreatSpec) ParseFile(filename string) error {

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return err
	}

	cmap := ast.NewCommentMap(fset, f, f.Comments)

	// Iterate over all comments, looking for ones not attached to a function
	for _, lines := range cmap.Comments() {
		for _, line := range strings.Split(lines.Text(), "\n") {

			if id, alias := ts.ParseAlias(line); alias != nil {
				ts.AddAlias(id, alias)
			}
		}
	}

	// Look for function-specific comments
	ast.Inspect(f, func(n ast.Node) bool {
		switch x := n.(type) {
		case *ast.FuncDecl:
			var fType string
			// https://www.socketloop.com/references/golang-go-ast-funcdecl-type-example
			if x.Recv != nil {
				recvType := x.Recv.List[0].Type
				if recvStarType, ok := recvType.(*ast.StarExpr); ok {
					fType = "(*" + recvStarType.X.(*ast.Ident).Name + ")"
				} else {
					fType = recvType.(*ast.Ident).Name
				}
			} else {
				fType = ""
			}

			function := Function{Begin: fset.Position(x.Pos()).Line,
				Package:  f.Name.String(),
				Name:     x.Name.String(),
				Type:     fType,
				End:      fset.Position(x.End()).Line,
				Filepath: fset.Position(x.Pos()).Filename,
				Comments: cmap[n]}

			source := function.ToSource()
			for _, lines := range function.Comments {
				for _, line := range strings.Split(lines.Text(), "\n") {
					if id, mitigation := ts.ParseMitigation(line, source); mitigation != nil {
						ts.AddMitigation(id, mitigation)
					} else if id, exposure := ts.ParseExposure(line, source); exposure != nil {
						ts.AddExposure(id, exposure)
					} else if id, transfer := ts.ParseTransfer(line, source); transfer != nil {
						ts.AddTransfer(id, transfer)
					} else if id, acceptance := ts.ParseAcceptance(line, source); acceptance != nil {
						ts.AddAcceptance(id, acceptance)
					}
				}
			}

		}
		return true
	})

	return nil
}
Example #25
// doPackage analyzes the single package constructed from the named files, looking for
// the definition of ident.
func doPackage(pkg *ast.Package, fset *token.FileSet, ident string) {
	var files []*File
	found := false
	for name, astFile := range pkg.Files {
		if packageFlag && astFile.Doc == nil {
			continue
		}
		file := &File{
			fset:       fset,
			name:       name,
			ident:      ident,
			lowerIdent: strings.ToLower(ident),
			file:       astFile,
			comments:   ast.NewCommentMap(fset, astFile, astFile.Comments),
		}
		if regexpFlag && regexp.QuoteMeta(ident) != ident {
			// It's a regular expression.
			var err error
			file.regexp, err = regexp.Compile("^(?i:" + ident + ")$")
			if err != nil {
				fmt.Fprintf(os.Stderr, "regular expression `%s`:", err)
				os.Exit(2)
			}
		}
		switch {
		case strings.HasPrefix(name, goRootSrcPkg):
			file.urlPrefix = "http://golang.org/pkg"
			file.pathPrefix = goRootSrcPkg
		case strings.HasPrefix(name, goRootSrcCmd):
			file.urlPrefix = "http://golang.org/cmd"
			file.pathPrefix = goRootSrcCmd
		default:
			file.urlPrefix = "http://godoc.org"
			for _, path := range goPaths {
				p := filepath.Join(path, "src")
				if strings.HasPrefix(name, p) {
					file.pathPrefix = p
					break
				}
			}
		}
		file.urlPrefix = urlHeadTag + file.urlPrefix
		files = append(files, file)
		if found {
			continue
		}
		file.doPrint = false
		if packageFlag {
			file.pkgComments()
		} else {
			ast.Walk(file, file.file)
			if file.found {
				found = true
			}
		}
	}

	if !found {
		return
	}

	// Type check to build map from name to type.
	defs := make(map[*ast.Ident]types.Object)
	uses := make(map[*ast.Ident]types.Object)

	// By providing the Context with our own error function, it will continue
	// past the first error. There is no need for that function to do anything.
	config := types.Config{
		Error: func(error) {},
	}
	info := &types.Info{
		Defs: defs,
		Uses: uses,
	}
	path := ""
	var astFiles []*ast.File
	for name, astFile := range pkg.Files {
		if path == "" {
			path = name
		}
		astFiles = append(astFiles, astFile)
	}
	config.Check(path, fset, astFiles, info) // Ignore errors.

	// We need to search all files for methods, so record the full list in each file.
	for _, file := range files {
		file.allFiles = files
	}
	for _, file := range files {
		file.doPrint = true
		file.objs = uses
		if packageFlag {
			file.pkgComments()
		} else {
			ast.Walk(file, file.file)
		}
	}
}
Example #26
func main() {
	flag.Usage = usage
	flag.Parse()

	// Check usage: either server and no args, command line and args, or index creation mode
	if (*httpAddr != "" || *urlFlag != "") != (flag.NArg() == 0) && !*writeIndex {
		usage()
	}

	// Determine file system to use.
	if *zipfile == "" {
		// use file system of underlying OS
		fs.Bind("/", vfs.OS(*goroot), "/", vfs.BindReplace)
		if *templateDir != "" {
			fs.Bind("/lib/godoc", vfs.OS(*templateDir), "/", vfs.BindBefore)
		} else {
			fs.Bind("/lib/godoc", mapfs.New(static.Files), "/", vfs.BindReplace)
		}
	} else {
		// use file system specified via .zip file (path separator must be '/')
		rc, err := zip.OpenReader(*zipfile)
		if err != nil {
			log.Fatalf("%s: %s\n", *zipfile, err)
		}
		defer rc.Close() // be nice (e.g., -writeIndex mode)
		fs.Bind("/", zipfs.New(rc, *zipfile), *goroot, vfs.BindReplace)
	}

	// Bind $GOPATH trees into Go root.
	for _, p := range filepath.SplitList(build.Default.GOPATH) {
		fs.Bind("/src/pkg", vfs.OS(p), "/src", vfs.BindAfter)
	}

	httpMode := *httpAddr != ""

	corpus := godoc.NewCorpus(fs)
	corpus.Verbose = *verbose
	corpus.IndexEnabled = *indexEnabled && httpMode
	corpus.IndexFiles = *indexFiles
	corpus.IndexThrottle = *indexThrottle
	if *writeIndex {
		corpus.IndexThrottle = 1.0
	}
	if *writeIndex || httpMode || *urlFlag != "" {
		if err := corpus.Init(); err != nil {
			log.Fatal(err)
		}
	}

	pres = godoc.NewPresentation(corpus)
	pres.TabWidth = *tabWidth
	pres.ShowTimestamps = *showTimestamps
	pres.ShowPlayground = *showPlayground
	pres.ShowExamples = *showExamples
	pres.DeclLinks = *declLinks
	if *notesRx != "" {
		pres.NotesRx = regexp.MustCompile(*notesRx)
	}

	readTemplates(pres, httpMode || *urlFlag != "" || *html)
	registerHandlers(pres)

	if *writeIndex {
		// Write search index and exit.
		if *indexFiles == "" {
			log.Fatal("no index file specified")
		}

		log.Println("initialize file systems")
		*verbose = true // want to see what happens

		corpus.UpdateIndex()

		log.Println("writing index file", *indexFiles)
		f, err := os.Create(*indexFiles)
		if err != nil {
			log.Fatal(err)
		}
		index, _ := corpus.CurrentIndex()
		err = index.Write(f)
		if err != nil {
			log.Fatal(err)
		}

		log.Println("done")
		return
	}

	// Print content that would be served at the URL *urlFlag.
	if *urlFlag != "" {
		handleURLFlag()
		return
	}

	if httpMode {
		// HTTP server mode.
		var handler http.Handler = http.DefaultServeMux
		if *verbose {
			log.Printf("Go Documentation Server")
			log.Printf("version = %s", runtime.Version())
			log.Printf("address = %s", *httpAddr)
			log.Printf("goroot = %s", *goroot)
			log.Printf("tabwidth = %d", *tabWidth)
			switch {
			case !*indexEnabled:
				log.Print("search index disabled")
			case *maxResults > 0:
				log.Printf("full text index enabled (maxresults = %d)", *maxResults)
			default:
				log.Print("identifier search index enabled")
			}
			fs.Fprint(os.Stderr)
			handler = loggingHandler(handler)
		}

		// Initialize search index.
		if *indexEnabled {
			go corpus.RunIndexer()
		}

		// Start http server.
		if err := http.ListenAndServe(*httpAddr, handler); err != nil {
			log.Fatalf("ListenAndServe %s: %v", *httpAddr, err)
		}

		return
	}

	packageText := pres.PackageText

	// Command line mode.
	if *html {
		packageText = pres.PackageHTML
	}

	if *query {
		handleRemoteSearch()
		return
	}

	// Determine paths.
	//
	// If we are passed an operating system path like . or ./foo or /foo/bar or c:\mysrc,
	// we need to map that path somewhere in the fs name space so that routines
	// like getPageInfo will see it.  We use the arbitrarily-chosen virtual path "/target"
	// for this.  That is, if we get passed a directory like the above, we map that
	// directory so that getPageInfo sees it as /target.
	const target = "/target"
	const cmdPrefix = "cmd/"
	path := flag.Arg(0)
	var forceCmd bool
	var abspath, relpath string
	if filepath.IsAbs(path) {
		fs.Bind(target, vfs.OS(path), "/", vfs.BindReplace)
		abspath = target
	} else if build.IsLocalImport(path) {
		cwd, _ := os.Getwd() // ignore errors
		path = filepath.Join(cwd, path)
		fs.Bind(target, vfs.OS(path), "/", vfs.BindReplace)
		abspath = target
	} else if strings.HasPrefix(path, cmdPrefix) {
		path = strings.TrimPrefix(path, cmdPrefix)
		forceCmd = true
	} else if bp, _ := build.Import(path, "", build.FindOnly); bp.Dir != "" && bp.ImportPath != "" {
		fs.Bind(target, vfs.OS(bp.Dir), "/", vfs.BindReplace)
		abspath = target
		relpath = bp.ImportPath
	} else {
		abspath = pathpkg.Join(pres.PkgFSRoot(), path)
	}
	if relpath == "" {
		relpath = abspath
	}

	var mode godoc.PageInfoMode
	if relpath == "builtin" {
		// the fake built-in package contains unexported identifiers
		mode = godoc.NoFiltering | godoc.NoFactoryFuncs
	}
	if *srcMode {
		// only filter exports if we don't have explicit command-line filter arguments
		if flag.NArg() > 1 {
			mode |= godoc.NoFiltering
		}
		mode |= godoc.ShowSource
	}

	// first, try as package unless forced as command
	var info *godoc.PageInfo
	if !forceCmd {
		info = pres.GetPkgPageInfo(abspath, relpath, mode)
	}

	// second, try as command unless the path is absolute
	// (the go command invokes godoc w/ absolute paths; don't override)
	var cinfo *godoc.PageInfo
	if !filepath.IsAbs(path) {
		abspath = pathpkg.Join(pres.CmdFSRoot(), path)
		cinfo = pres.GetCmdPageInfo(abspath, relpath, mode)
	}

	// determine what to use
	if info == nil || info.IsEmpty() {
		if cinfo != nil && !cinfo.IsEmpty() {
			// only cinfo exists - switch to cinfo
			info = cinfo
		}
	} else if cinfo != nil && !cinfo.IsEmpty() {
		// both info and cinfo exist - use cinfo if info
		// contains only subdirectory information
		if info.PAst == nil && info.PDoc == nil {
			info = cinfo
		} else {
			fmt.Printf("use 'godoc %s%s' for documentation on the %s command \n\n", cmdPrefix, relpath, relpath)
		}
	}

	if info == nil {
		log.Fatalf("%s: no such directory or package", flag.Arg(0))
	}
	if info.Err != nil {
		log.Fatalf("%v", info.Err)
	}

	if info.PDoc != nil && info.PDoc.ImportPath == target {
		// Replace virtual /target with actual argument from command line.
		info.PDoc.ImportPath = flag.Arg(0)
	}

	// If we have more than one argument, use the remaining arguments for filtering.
	if flag.NArg() > 1 {
		args := flag.Args()[1:]
		rx := makeRx(args)
		if rx == nil {
			log.Fatalf("illegal regular expression from %v", args)
		}

		filter := func(s string) bool { return rx.MatchString(s) }
		switch {
		case info.PAst != nil:
			cmap := ast.NewCommentMap(info.FSet, info.PAst, info.PAst.Comments)
			ast.FilterFile(info.PAst, filter)
			// Special case: Don't use templates for printing
			// so we only get the filtered declarations without
			// package clause or extra whitespace.
			for i, d := range info.PAst.Decls {
				// determine the comments associated with d only
				comments := cmap.Filter(d).Comments()
				cn := &printer.CommentedNode{Node: d, Comments: comments}
				if i > 0 {
					fmt.Println()
				}
				if *html {
					var buf bytes.Buffer
					pres.WriteNode(&buf, info.FSet, cn)
					godoc.FormatText(os.Stdout, buf.Bytes(), -1, true, "", nil)
				} else {
					pres.WriteNode(os.Stdout, info.FSet, cn)
				}
				fmt.Println()
			}
			return

		case info.PDoc != nil:
			info.PDoc.Filter(filter)
		}
	}

	if err := packageText.Execute(os.Stdout, info); err != nil {
		log.Printf("packageText.Execute: %s", err)
	}
}
Example #27
func Compile(importPath string, files []*ast.File, fileSet *token.FileSet, importContext *ImportContext, minify bool) (*Archive, error) {
	info := &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}

	var errList ErrorList
	var previousErr error
	config := &types.Config{
		Packages: importContext.Packages,
		Import: func(_ map[string]*types.Package, path string) (*types.Package, error) {
			if _, err := importContext.Import(path); err != nil {
				return nil, err
			}
			return importContext.Packages[path], nil
		},
		Sizes: sizes32,
		Error: func(err error) {
			if previousErr != nil && previousErr.Error() == err.Error() {
				return
			}
			errList = append(errList, err)
			previousErr = err
		},
	}
	typesPkg, err := config.Check(importPath, fileSet, files, info)
	if errList != nil {
		return nil, errList
	}
	if err != nil {
		return nil, err
	}
	importContext.Packages[importPath] = typesPkg

	gcData := bytes.NewBuffer(nil)
	gcexporter.Write(typesPkg, gcData, sizes32)
	encodedFileSet := bytes.NewBuffer(nil)
	if err := fileSet.Write(json.NewEncoder(encodedFileSet).Encode); err != nil {
		return nil, err
	}
	archive := &Archive{
		ImportPath:   PkgPath(importPath),
		GcData:       gcData.Bytes(),
		Dependencies: []PkgPath{PkgPath("github.com/gopherjs/gopherjs/js"), PkgPath("runtime")}, // all packages depend on those
		FileSet:      encodedFileSet.Bytes(),
		Minified:     minify,
	}

	c := &funcContext{
		p: &pkgContext{
			pkg:           typesPkg,
			info:          info,
			importContext: importContext,
			comments:      make(ast.CommentMap),
			funcContexts:  make(map[*types.Func]*funcContext),
			pkgVars:       make(map[string]string),
			objectVars:    make(map[types.Object]string),
			escapingVars:  make(map[types.Object]bool),
			indentation:   1,
			dependencies:  make(map[types.Object]bool),
			minify:        minify,
		},
		allVars:     make(map[string]int),
		flowDatas:   map[string]*flowData{"": &flowData{}},
		flattened:   make(map[ast.Node]bool),
		blocking:    make(map[ast.Node]bool),
		caseCounter: 1,
		labelCases:  make(map[string]int),
		localCalls:  make(map[*types.Func][][]ast.Node),
	}
	for name := range reservedKeywords {
		c.allVars[name] = 1
	}

	// imports
	var importedPaths []string
	for _, importedPkg := range typesPkg.Imports() {
		varName := c.newVariableWithLevel(importedPkg.Name(), true, "")
		c.p.pkgVars[importedPkg.Path()] = varName
		archive.Imports = append(archive.Imports, PkgImport{Path: PkgPath(importedPkg.Path()), VarName: varName})
		importedPaths = append(importedPaths, importedPkg.Path())
	}
	sort.Strings(importedPaths)
	for _, impPath := range importedPaths {
		impOutput, err := importContext.Import(impPath)
		if err != nil {
			return nil, err
		}
		archive.AddDependenciesOf(impOutput)
	}

	var functions []*ast.FuncDecl
	var toplevelTypes []*types.TypeName
	var vars []*types.Var
	for _, file := range files {
		for k, v := range ast.NewCommentMap(fileSet, file, file.Comments) {
			c.p.comments[k] = v
		}

		for _, decl := range file.Decls {
			switch d := decl.(type) {
			case *ast.FuncDecl:
				sig := c.p.info.Defs[d.Name].(*types.Func).Type().(*types.Signature)
				var recvType types.Type
				if sig.Recv() != nil {
					recvType = sig.Recv().Type()
					if ptr, isPtr := recvType.(*types.Pointer); isPtr {
						recvType = ptr.Elem()
					}
				}
				o := c.p.info.Defs[d.Name].(*types.Func)
				c.p.funcContexts[o] = c.p.analyzeFunction(sig, d.Body)
				if sig.Recv() == nil {
					c.objectName(o) // register toplevel name
				}
				if !isBlank(d.Name) {
					functions = append(functions, d)
				}
			case *ast.GenDecl:
				switch d.Tok {
				case token.TYPE:
					for _, spec := range d.Specs {
						o := c.p.info.Defs[spec.(*ast.TypeSpec).Name].(*types.TypeName)
						toplevelTypes = append(toplevelTypes, o)
						c.objectName(o) // register toplevel name
					}
				case token.VAR:
					for _, spec := range d.Specs {
						for _, name := range spec.(*ast.ValueSpec).Names {
							if !isBlank(name) {
								o := c.p.info.Defs[name].(*types.Var)
								vars = append(vars, o)
								c.objectName(o) // register toplevel name
							}
						}
					}
				case token.CONST:
					// skip, constants are inlined
				}
			}
		}
	}

	for {
		done := true
		for _, context := range c.p.funcContexts {
			for obj, calls := range context.localCalls {
				if len(c.p.funcContexts[obj].blocking) != 0 {
					for _, call := range calls {
						context.markBlocking(call)
					}
					delete(context.localCalls, obj)
					done = false
				}
			}
		}
		if done {
			break
		}
	}

	collectDependencies := func(self types.Object, f func()) []DepId {
		c.p.dependencies = make(map[types.Object]bool)
		f()
		var deps []string
		for dep := range c.p.dependencies {
			if dep != self {
				deps = append(deps, dep.Pkg().Path()+":"+dep.Name())
			}
		}
		sort.Strings(deps)
		depIds := make([]DepId, len(deps))
		for i, dep := range deps {
			depIds[i] = DepId(dep)
		}
		return depIds
	}

	// types
	for _, o := range toplevelTypes {
		typeName := c.objectName(o)
		var d Decl
		d.Vars = []string{typeName}
		d.DceFilters = []DepId{DepId(o.Name())}
		d.DceDeps = collectDependencies(o, func() {
			d.BodyCode = removeWhitespace(c.CatchOutput(0, func() { c.translateType(o, true) }), minify)
			d.InitCode = removeWhitespace(c.CatchOutput(1, func() { c.initType(o) }), minify)
		})
		archive.Declarations = append(archive.Declarations, d)
	}

	// variables
	varsWithInit := make(map[*types.Var]bool)
	for _, init := range c.p.info.InitOrder {
		for _, o := range init.Lhs {
			varsWithInit[o] = true
		}
	}
	for _, o := range vars {
		var d Decl
		if !o.Exported() {
			d.Vars = []string{c.objectName(o)}
		}
		if _, ok := varsWithInit[o]; !ok {
			d.DceDeps = collectDependencies(nil, func() {
				value := c.zeroValue(o.Type())
				if importPath == "runtime" && o.Name() == "sizeof_C_MStats" {
					value = "3712"
				}
				d.InitCode = removeWhitespace([]byte(fmt.Sprintf("\t\t%s = %s;\n", c.objectName(o), value)), minify)
			})
		}
		d.DceFilters = []DepId{DepId(o.Name())}
		archive.Declarations = append(archive.Declarations, d)
	}
	for _, init := range c.p.info.InitOrder {
		lhs := make([]ast.Expr, len(init.Lhs))
		for i, o := range init.Lhs {
			ident := ast.NewIdent(o.Name())
			c.p.info.Defs[ident] = o
			lhs[i] = c.setType(ident, o.Type())
			varsWithInit[o] = true
		}
		var d Decl
		d.DceDeps = collectDependencies(nil, func() {
			c.localVars = nil
			d.InitCode = removeWhitespace(c.CatchOutput(1, func() {
				ast.Walk(c, init.Rhs)
				c.translateStmt(&ast.AssignStmt{
					Lhs: lhs,
					Tok: token.DEFINE,
					Rhs: []ast.Expr{init.Rhs},
				}, "")
			}), minify)
			d.Vars = append(d.Vars, c.localVars...)
		})
		if len(init.Lhs) == 1 {
			v := hasCallVisitor{c.p.info, false}
			ast.Walk(&v, init.Rhs)
			if !v.hasCall {
				d.DceFilters = []DepId{DepId(init.Lhs[0].Name())}
			}
		}
		archive.Declarations = append(archive.Declarations, d)
	}

	// functions
	var mainFunc *types.Func
	for _, fun := range functions {
		o := c.p.info.Defs[fun.Name].(*types.Func)
		context := c.p.funcContexts[o]
		d := Decl{
			FullName: []byte(o.FullName()),
			Blocking: len(context.blocking) != 0,
		}
		if fun.Recv == nil {
			d.Vars = []string{c.objectName(o)}
			switch o.Name() {
			case "main":
				mainFunc = o
			case "init":
				d.InitCode = removeWhitespace(c.CatchOutput(1, func() {
					id := c.newIdent("", types.NewSignature(nil, nil, nil, nil, false))
					c.p.info.Uses[id] = o
					call := &ast.CallExpr{Fun: id}
					c.Visit(call)
					c.translateStmt(&ast.ExprStmt{X: call}, "")
				}), minify)
			default:
				d.DceFilters = []DepId{DepId(o.Name())}
			}
		}
		if fun.Recv != nil {
			recvType := o.Type().(*types.Signature).Recv().Type()
			ptr, isPointer := recvType.(*types.Pointer)
			namedRecvType, _ := recvType.(*types.Named)
			if isPointer {
				namedRecvType = ptr.Elem().(*types.Named)
			}
			d.DceFilters = []DepId{DepId(namedRecvType.Obj().Name())}
			if !fun.Name.IsExported() {
				d.DceFilters = append(d.DceFilters, DepId(fun.Name.Name))
			}
		}

		d.DceDeps = collectDependencies(o, func() {
			d.BodyCode = removeWhitespace(c.translateToplevelFunction(fun, context), minify)
		})
		archive.Declarations = append(archive.Declarations, d)
		if fun.Recv == nil && strings.HasPrefix(fun.Name.String(), "Test") {
			archive.Tests = append(archive.Tests, fun.Name.String())
		}
	}

	archive.BlockingInit = len(c.blocking) != 0

	// $run function
	if typesPkg.Name() == "main" {
		var stmts []ast.Stmt
		for _, dep := range archive.Dependencies {
			id := c.newIdent(fmt.Sprintf(`$packages["%s"].$init`, dep), types.NewSignature(nil, nil, nil, nil, false))
			call := &ast.CallExpr{Fun: id}
			depArchive, err := importContext.Import(string(dep))
			if err != nil {
				panic(err)
			}
			if depArchive.BlockingInit {
				c.blocking[call] = true
				c.flattened[call] = true
			}
			stmts = append(stmts, &ast.ExprStmt{X: call})
		}

		{
			id := c.newIdent("$pkg.$init", types.NewSignature(nil, nil, nil, nil, false))
			call := &ast.CallExpr{Fun: id}
			if archive.BlockingInit {
				c.blocking[call] = true
				c.flattened[call] = true
			}
			stmts = append(stmts, &ast.ExprStmt{X: call})
		}

		{
			id := c.newIdent("", types.NewSignature(nil, nil, nil, nil, false))
			c.p.info.Uses[id] = mainFunc
			call := &ast.CallExpr{Fun: id}
			c.Visit(call)
			stmts = append(stmts, &ast.ExprStmt{X: call})
		}

		archive.Declarations = append(archive.Declarations, Decl{
			BodyCode: removeWhitespace(append(append([]byte("\t$pkg.$run = function($b) {\n"), c.translateFunctionBody(stmts)...), []byte("\t};\n")...), minify),
		})
	}

	return archive, nil
}
Example #28
func runExtract(cmd *Command, args []string) error {
	if len(args) == 0 {
		args = []string{"."}
	}

	conf := loader.Config{
		Build:      &build.Default,
		ParserMode: parser.ParseComments,
	}

	// Use the initial packages from the command line.
	args, err := conf.FromArgs(args, false)
	if err != nil {
		return err
	}

	// Load, parse and type-check the whole program.
	iprog, err := conf.Load()
	if err != nil {
		return err
	}

	// print returns Go syntax for the specified node.
	print := func(n ast.Node) string {
		var buf bytes.Buffer
		format.Node(&buf, conf.Fset, n)
		return buf.String()
	}

	var translations []Translation

	for _, info := range iprog.InitialPackages() {
		for _, f := range info.Files {
			// Associate comments with nodes.
			cmap := ast.NewCommentMap(iprog.Fset, f, f.Comments)
			getComment := func(n ast.Node) string {
				cs := cmap.Filter(n).Comments()
				if len(cs) > 0 {
					return strings.TrimSpace(cs[0].Text())
				}
				return ""
			}

			// Find function calls.
			ast.Inspect(f, func(n ast.Node) bool {
				call, ok := n.(*ast.CallExpr)
				if !ok {
					return true
				}

				// Skip calls of functions other than
				// (*message.Printer).{Sp,Fp,P}rintf.
				sel, ok := call.Fun.(*ast.SelectorExpr)
				if !ok {
					return true
				}
				meth := info.Selections[sel]
				if meth == nil || meth.Kind() != types.MethodVal {
					return true
				}
				// TODO: remove cheap hack and check if the type either
				// implements some interface or is specifically of type
				// "golang.org/x/text/message".Printer.
				m, ok := extractFuncs[path.Base(meth.Recv().String())]
				if !ok {
					return true
				}

				// argn is the index of the format string.
				argn, ok := m[meth.Obj().Name()]
				if !ok || argn >= len(call.Args) {
					return true
				}

				// Skip calls with non-constant format string.
				fmtstr := info.Types[call.Args[argn]].Value
				if fmtstr == nil || fmtstr.Kind() != constant.String {
					return true
				}

				posn := conf.Fset.Position(call.Lparen)
				filepos := fmt.Sprintf("%s:%d:%d", filepath.Base(posn.Filename), posn.Line, posn.Column)

				// TODO: identify the type of the format argument. If it is not
				// a string, multiple keys may be defined.
				var key []string

				// TODO: replace substitutions (%v) with a translator friendly
				// notation. For instance:
				//     "%d files remaining" -> "{numFiles} files remaining", or
				//     "%d files remaining" -> "{arg1} files remaining"
				// Alternatively, this could be done at a later stage.
				msg := constant.StringVal(fmtstr)

				// Construct a Translation unit.
				c := Translation{
					Key:              key,
					Position:         filepath.Join(info.Pkg.Path(), filepos),
					Original:         Text{Msg: msg},
					ExtractedComment: getComment(call.Args[0]),
					// TODO(fix): this doesn't get the before comment.
					// Comment: getComment(call),
				}

				for i, arg := range call.Args[argn+1:] {
					var val string
					if v := info.Types[arg].Value; v != nil {
						val = v.ExactString()
					}
					posn := conf.Fset.Position(arg.Pos())
					filepos := fmt.Sprintf("%s:%d:%d", filepath.Base(posn.Filename), posn.Line, posn.Column)
					c.Args = append(c.Args, Argument{
						ID:             i + 1,
						Type:           info.Types[arg].Type.String(),
						UnderlyingType: info.Types[arg].Type.Underlying().String(),
						Expr:           print(arg),
						Value:          val,
						Comment:        getComment(arg),
						Position:       filepath.Join(info.Pkg.Path(), filepos),
						// TODO report whether it implements
						// interfaces plural.Interface,
						// gender.Interface.
					})
				}

				translations = append(translations, c)
				return true
			})
		}
	}

	data, err := json.MarshalIndent(translations, "", "    ")
	if err != nil {
		return err
	}
	for _, tag := range getLangs() {
		// TODO: merge with existing files, don't overwrite.
		os.MkdirAll(*dir, 0744)
		file := filepath.Join(*dir, fmt.Sprintf("gotext_%v.out.json", tag))
		if err := ioutil.WriteFile(file, data, 0744); err != nil {
			return fmt.Errorf("could not create file: %v", err)
		}
	}
	return nil
}