Example #1
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) error {
	pkg.defs = make(map[*ast.Ident]types.Object)
	pkg.uses = make(map[*ast.Ident]types.Object)
	pkg.spans = make(map[types.Object]Span)
	pkg.types = make(map[ast.Expr]types.TypeAndValue)
	// Providing a Config with our own error function lets the type checker
	// continue past the first error. That function does not need to do anything.
	config := types.Config{
		Error: func(error) {},
	}
	info := &types.Info{
		Types: pkg.types,
		Defs:  pkg.defs,
		Uses:  pkg.uses,
	}
	typesPkg, err := config.Check(pkg.path, fs, astFiles, info)
	pkg.typesPkg = typesPkg
	// update spans
	for id, obj := range pkg.defs {
		pkg.growSpan(id, obj)
	}
	for id, obj := range pkg.uses {
		pkg.growSpan(id, obj)
	}
	return err
}
Example #2
func pkgForPath(path string) (*types.Package, error) {
	// collect filenames
	ctxt := build.Default
	pkginfo, err := ctxt.Import(path, "", 0)
	if err != nil {
		return nil, err
	}
	filenames := append(pkginfo.GoFiles, pkginfo.CgoFiles...)

	// parse files
	fset := token.NewFileSet()
	files := make([]*ast.File, len(filenames))
	for i, filename := range filenames {
		var err error
		files[i], err = parser.ParseFile(fset, filepath.Join(pkginfo.Dir, filename), nil, 0)
		if err != nil {
			return nil, err
		}
	}

	// typecheck files
	// (we only care about exports and thus can ignore function bodies)
	conf := types.Config{IgnoreFuncBodies: true, FakeImportC: true}
	return conf.Check(path, fset, files, nil)
}
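A minimal usage sketch for pkgForPath (not part of the original example; it assumes the helper is compiled together with imports of fmt, log, and go/ast): type-check a package by import path and list its exported top-level names.

func main() {
	pkg, err := pkgForPath("fmt")
	if err != nil {
		log.Fatal(err)
	}
	// Package.Scope holds the package-level declarations recorded by Check.
	for _, name := range pkg.Scope().Names() {
		if ast.IsExported(name) {
			fmt.Println(name)
		}
	}
}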
Example #3
func checkPkgFiles(files []*ast.File) {
	type bailout struct{}
	conf := types.Config{
		FakeImportC: true,
		Error: func(err error) {
			if !*allErrors && errorCount >= 10 {
				panic(bailout{})
			}
			report(err)
		},
		Sizes: sizes,
	}
	if *gccgo {
		var inst gccgoimporter.GccgoInstallation
		inst.InitFromDriver("gccgo")
		conf.Import = inst.GetImporter(nil, nil)
	}

	defer func() {
		switch p := recover().(type) {
		case nil, bailout:
			// normal return or early exit
		default:
			// re-panic
			panic(p)
		}
	}()

	const path = "pkg" // any non-empty string will do for now
	conf.Check(path, fset, files, nil)
}
Example #4
func makeInfo(dir string, fset *token.FileSet, f *ast.File) (types.Info, error) {
	cfg := types.Config{IgnoreFuncBodies: true}

	info := types.Info{
		Types: make(map[ast.Expr]types.TypeAndValue),
		Defs:  make(map[*ast.Ident]types.Object),
		Uses:  make(map[*ast.Ident]types.Object),
	}

	_, err := cfg.Check(dir, fset, []*ast.File{f}, &info)
	return info, err
}
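A hedged sketch of how makeInfo's result might be consumed (the printDefs helper and its file handling are illustrative, not part of the original): parse one file, type-check it, and report the object recorded for each defining identifier.

func printDefs(dir, filename string) error {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, nil, 0)
	if err != nil {
		return err
	}
	info, err := makeInfo(dir, fset, f)
	if err != nil {
		return err // type errors; Defs may still be partially populated
	}
	for ident, obj := range info.Defs {
		// obj is nil for identifiers that do not denote an object,
		// such as the package name in the package clause.
		if obj != nil {
			fmt.Printf("%s: %s\n", fset.Position(ident.Pos()), obj)
		}
	}
	return nil
}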
Example #5
// typeCheck creates a typedPackage from a package_
func typeCheck(p package_) (typedPackage, error) {
	tp := typedPackage{
		package_:  p,
		callTypes: make(map[ast.Expr]types.Type),
		identObjs: make(map[*ast.Ident]types.Object),
	}

	info := types.Info{
		Types:   tp.callTypes,
		Objects: tp.identObjs,
	}
	context := types.Config{Import: importer.NewImporter().Import}

	_, err := context.Check(p.path, p.fset, p.astFiles, &info)
	return tp, err
}
Example #6
func pkgForSource(src string) (*types.Package, error) {
	// parse file
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, 0)
	if err != nil {
		return nil, err
	}

	// typecheck file
	conf := types.Config{
		// strconv exports IntSize as a constant. The type-checker must
		// use the same word size otherwise the result of the type-checker
		// and gc imports is different. We don't care about alignment
		// since none of the tests have exported constants depending
		// on alignment (see also issue 8366).
		Sizes: &types.StdSizes{WordSize: strconv.IntSize / 8, MaxAlign: 8},
	}
	return conf.Check("import-test", fset, []*ast.File{f}, nil)
}
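A short, hedged usage sketch (the source snippet and constant lookup are illustrative assumptions, not from the original test): type-check an in-memory file and read back one of its constants.

func exampleConst() error {
	pkg, err := pkgForSource(`package p; const Answer = 6 * 7`)
	if err != nil {
		return err
	}
	// Scope.Lookup finds a package-level object by name.
	if c, ok := pkg.Scope().Lookup("Answer").(*types.Const); ok {
		fmt.Println(c.Name(), "=", c.Val()) // Answer = 42
	}
	return nil
}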
Example #7
func processPackage(path string, fset *token.FileSet, files []*ast.File) {
	type bailout struct{}
	conf := types.Config{
		Error: func(err error) {
			if !*allErrors && errorCount >= 10 {
				panic(bailout{})
			}
			report(err)
		},
	}

	defer func() {
		switch err := recover().(type) {
		case nil, bailout:
		default:
			panic(err)
		}
	}()

	conf.Check(path, fset, files, nil)
}
Example #8
func GetAllDependencies(pkg string, config *types.Config) ([]*types.Package, error) {
	var dependencies []*types.Package // ordered
	imported := make(map[string]bool)
	var importPkg func(string, []string) error
	importPkg = func(importPath string, importing []string) error {
		if importPath == "unsafe" || importPath == "go/doc" {
			return nil
		}
		if _, found := imported[importPath]; found {
			return nil
		}
		for _, path := range importing {
			if path == importPath {
				return fmt.Errorf("package import cycle: %s -> %s", strings.Join(importing, " -> "), importPath)
			}
		}

		typesPkg, err := config.Import(config.Packages, importPath)
		if err != nil {
			return err
		}
		var imps []string
		for _, imp := range typesPkg.Imports() {
			imps = append(imps, imp.Path())
		}
		sort.Strings(imps)
		for _, imp := range imps {
			if err := importPkg(imp, append(importing, importPath)); err != nil {
				return err
			}
		}

		dependencies = append(dependencies, typesPkg)
		imported[importPath] = true
		return nil
	}
	importPkg("runtime", nil) // all packages depend on runtime
	err := importPkg(pkg, nil)
	return dependencies, err
}
Example #9
// Import implements the Importer type from go/types.
func (imp Importer) Import(imports map[string]*types.Package, path string) (pkg *types.Package, err error) {
	// types.Importer does not seem to be designed for recursive
	// parsing like we're doing here. Specifically, each nested import
	// will maintain its own imports map. This will lead to duplicate
	// imports and in turn packages, which will lead to funny errors
	// such as "cannot pass argument ip (variable of type net.IP) to
	// variable of type net.IP"
	//
	// To work around this, we keep a global imports map, allImports,
	// to which we add all nested imports, and which we use as the
	// cache, instead of imports.
	//
	// Since all nested imports will also use this importer, there
	// should be no way to end up with duplicate imports.

	// We first try to use GcImport directly. This has the downside of
	// using possibly out-of-date packages, but it has the upside of
	// not having to parse most of the Go standard library.

	buildPkg, buildErr := build.Import(path, ".", 0)
	// If we found no build dir, assume we're dealing with an installed
	// package for which no source is available. If we found a build dir,
	// only use GcImport if it's in GOROOT. This way we always use
	// up-to-date code for normal packages but avoid parsing the standard
	// library.
	if (buildErr == nil && buildPkg.Goroot) || buildErr != nil {
		pkg, err = types.GcImport(imp.Imports, path)
		if err == nil {
			// We don't use imports, but per API we have to add the package.
			imports[pkg.Path()] = pkg
			imp.Imports[pkg.Path()] = pkg
			return pkg, nil
		}
	}

	// See if we already imported this package
	if pkg = imp.Imports[path]; pkg != nil && pkg.Complete() {
		return pkg, nil
	}

	// allImports failed, try to use go/build
	if buildErr != nil {
		return nil, fmt.Errorf("build.Import failed: %s", buildErr)
	}

	// TODO check if the .a file is up to date and use it instead
	fileSet := token.NewFileSet()

	isGoFile := func(d os.FileInfo) bool {
		allFiles := make([]string, 0, len(buildPkg.GoFiles)+len(buildPkg.CgoFiles))
		allFiles = append(allFiles, buildPkg.GoFiles...)
		allFiles = append(allFiles, buildPkg.CgoFiles...)

		for _, file := range allFiles {
			if file == d.Name() {
				return true
			}
		}
		return false
	}
	pkgs, err := parser.ParseDir(fileSet, buildPkg.Dir, isGoFile, 0)
	if err != nil {
		return nil, err
	}

	delete(pkgs, "documentation")
	var astPkg *ast.Package
	var name string
	for name, astPkg = range pkgs {
		// Use the first non-main package, or the only package we
		// found.
		//
		// NOTE(dh) I can't think of a reason why there should be
		// multiple packages in a single directory, but ParseDir
		// allows for that possibility.
		if len(pkgs) == 1 || name != "main" {
			break
		}
	}

	if astPkg == nil {
		return nil, fmt.Errorf("can't find import: %s", name)
	}

	var ff []*ast.File
	for _, f := range astPkg.Files {
		ff = append(ff, f)
	}

	context := types.Config{
		Import: imp.Import,
	}

	pkg, err = context.Check(name, fileSet, ff, nil)
	if err != nil {
		return pkg, err
	}
	if !pkg.Complete() {
		pkg = types.NewPackage(pkg.Pos(), pkg.Path(), pkg.Name(), pkg.Scope(), pkg.Imports(), true)
	}

	imports[path] = pkg
	imp.Imports[path] = pkg
	return pkg, nil
}
Example #10
func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg *types.Package, err error) {
	defer func() {
		err := recover()
		if err != nil && typeVerbose {
			log.Println(err)
		}
	}()

	if strings.HasPrefix(name, ".") && parentDir != "" {
		name = filepath.Join(parentDir, name)
	}
	pkg = w.imported[name]
	if pkg != nil {
		if pkg == &w.importing {
			return nil, fmt.Errorf("cycle importing package %q", name)
		}
		return pkg, nil
	}

	if typeVerbose {
		log.Println("parser pkg", name)
	}

	var bp *build.Package
	if filepath.IsAbs(name) {
		bp, err = w.context.ImportDir(name, 0)
	} else {
		bp, err = w.context.Import(name, "", 0)
	}

	checkName := name

	if bp.ImportPath == "." {
		checkName = bp.Name
	} else {
		checkName = bp.ImportPath
	}

	if err != nil {
		return nil, err
		//if _, nogo := err.(*build.NoGoError); nogo {
		//	return
		//}
		//return
		//log.Fatalf("pkg %q, dir %q: ScanDir: %v", name, info.Dir, err)
	}

	filenames := append(append([]string{}, bp.GoFiles...), bp.CgoFiles...)
	filenames = append(filenames, bp.TestGoFiles...)

	if name == "runtime" {
		n := fmt.Sprintf("zgoos_%s.go", w.context.GOOS)
		if !contains(filenames, n) {
			filenames = append(filenames, n)
		}

		n = fmt.Sprintf("zgoarch_%s.go", w.context.GOARCH)
		if !contains(filenames, n) {
			filenames = append(filenames, n)
		}
	}

	parserFiles := func(filenames []string, cursor *FileCursor) (files []*ast.File) {
		for _, file := range filenames {
			var f *ast.File
			if cursor != nil && cursor.fileName == file {
				f, err = w.parseFile(bp.Dir, file, cursor.src)
				cursor.pos = token.Pos(w.fset.File(f.Pos()).Base()) + token.Pos(cursor.cursorPos)
				cursor.fileDir = bp.Dir
			} else {
				f, err = w.parseFile(bp.Dir, file, nil)
			}
			if err != nil && typeVerbose {
				log.Printf("error parsing package %s: %s\n", name, err)
			}
			files = append(files, f)
		}
		return
	}
	files := parserFiles(filenames, conf.Cursor)
	xfiles := parserFiles(bp.XTestGoFiles, conf.Cursor)

	typesConf := types.Config{
		IgnoreFuncBodies: conf.IgnoreFuncBodies,
		FakeImportC:      true,
		Packages:         w.gcimporter,
		Import: func(imports map[string]*types.Package, name string) (pkg *types.Package, err error) {
			if pkg != nil {
				return pkg, nil
			}
			if conf.AllowBinary && w.isBinaryPkg(name) {
				pkg = w.gcimporter[name]
				if pkg != nil && pkg.Complete() {
					return
				}
				pkg, err = gcimporter.Import(imports, name)
				if pkg != nil && pkg.Complete() {
					w.gcimporter[name] = pkg
					return
				}
			}
			return w.Import(bp.Dir, name, &PkgConfig{IgnoreFuncBodies: true, AllowBinary: true})
		},
		Error: func(err error) {
			if typeVerbose {
				log.Println(err)
			}
		},
	}
	if pkg == nil {
		pkg, err = typesConf.Check(checkName, w.fset, files, conf.Info)
	}
	w.imported[name] = pkg

	if len(xfiles) > 0 {
		xpkg, _ := typesConf.Check(checkName+"_test", w.fset, xfiles, conf.Info)
		w.imported[checkName+"_test"] = xpkg
	}
	return
}
Example #11
// doPackage analyzes the single package constructed from the named files, looking for
// the definition of ident.
func doPackage(pkg *ast.Package, fset *token.FileSet, ident string) {
	var files []*File
	found := false
	for name, astFile := range pkg.Files {
		if packageFlag && astFile.Doc == nil {
			continue
		}
		file := &File{
			fset:       fset,
			name:       name,
			ident:      ident,
			lowerIdent: strings.ToLower(ident),
			file:       astFile,
			comments:   ast.NewCommentMap(fset, astFile, astFile.Comments),
		}
		if regexpFlag && regexp.QuoteMeta(ident) != ident {
			// It's a regular expression.
			var err error
			file.regexp, err = regexp.Compile("^(?i:" + ident + ")$")
			if err != nil {
				fmt.Fprintf(os.Stderr, "regular expression `%s`:", err)
				os.Exit(2)
			}
		}
		switch {
		case strings.HasPrefix(name, goRootSrcPkg):
			file.urlPrefix = "http://golang.org/pkg"
			file.pathPrefix = goRootSrcPkg
		case strings.HasPrefix(name, goRootSrcCmd):
			file.urlPrefix = "http://golang.org/cmd"
			file.pathPrefix = goRootSrcCmd
		default:
			file.urlPrefix = "http://godoc.org"
			for _, path := range goPaths {
				p := filepath.Join(path, "src")
				if strings.HasPrefix(name, p) {
					file.pathPrefix = p
					break
				}
			}
		}
		file.urlPrefix = urlHeadTag + file.urlPrefix
		files = append(files, file)
		if found {
			continue
		}
		file.doPrint = false
		if packageFlag {
			file.pkgComments()
		} else {
			ast.Walk(file, file.file)
			if file.found {
				found = true
			}
		}
	}

	if !found {
		return
	}

	// Type check to build map from name to type.
	defs := make(map[*ast.Ident]types.Object)
	uses := make(map[*ast.Ident]types.Object)

	// Providing the Config with our own error function lets the type checker
	// continue past the first error. That function does not need to do anything.
	config := types.Config{
		Error: func(error) {},
	}
	info := &types.Info{
		Defs: defs,
		Uses: uses,
	}
	path := ""
	var astFiles []*ast.File
	for name, astFile := range pkg.Files {
		if path == "" {
			path = name
		}
		astFiles = append(astFiles, astFile)
	}
	config.Check(path, fset, astFiles, info) // Ignore errors.

	// We need to search all files for methods, so record the full list in each file.
	for _, file := range files {
		file.allFiles = files
	}
	for _, file := range files {
		file.doPrint = true
		file.objs = uses
		if packageFlag {
			file.pkgComments()
		} else {
			ast.Walk(file, file.file)
		}
	}
}
Example #12
func (w *Walker) Import(name string) (pkg *types.Package) {
	pkg = w.imported[name]
	if pkg != nil {
		if pkg == &importing {
			log.Fatalf("cycle importing package %q", name)
		}
		return pkg
	}
	w.imported[name] = &importing

	// Determine package files.
	dir := filepath.Join(w.root, filepath.FromSlash(name))
	if fi, err := os.Stat(dir); err != nil || !fi.IsDir() {
		log.Fatalf("no source in tree for package %q", pkg)
	}

	context := w.context
	if context == nil {
		context = &build.Default
	}

	// Look in cache.
	// If we've already done an import with the same set
	// of relevant tags, reuse the result.
	var key string
	if usePkgCache {
		if tags, ok := pkgTags[dir]; ok {
			key = tagKey(dir, context, tags)
			if pkg := pkgCache[key]; pkg != nil {
				w.imported[name] = pkg
				return pkg
			}
		}
	}

	info, err := context.ImportDir(dir, 0)
	if err != nil {
		if _, nogo := err.(*build.NoGoError); nogo {
			return
		}
		log.Fatalf("pkg %q, dir %q: ScanDir: %v", name, dir, err)
	}

	// Save tags list first time we see a directory.
	if usePkgCache {
		if _, ok := pkgTags[dir]; !ok {
			pkgTags[dir] = info.AllTags
			key = tagKey(dir, context, info.AllTags)
		}
	}

	filenames := append(append([]string{}, info.GoFiles...), info.CgoFiles...)

	// Parse package files.
	var files []*ast.File
	for _, file := range filenames {
		f, err := w.parseFile(dir, file)
		if err != nil {
			log.Fatalf("error parsing package %s: %s", name, err)
		}
		files = append(files, f)
	}

	// Type-check package files.
	conf := types.Config{
		IgnoreFuncBodies: true,
		FakeImportC:      true,
		Import: func(imports map[string]*types.Package, name string) (*types.Package, error) {
			pkg := w.Import(name)
			imports[name] = pkg
			return pkg, nil
		},
	}
	pkg, err = conf.Check(name, fset, files, nil)
	if err != nil {
		ctxt := "<no context>"
		if w.context != nil {
			ctxt = fmt.Sprintf("%s-%s", w.context.GOOS, w.context.GOARCH)
		}
		log.Fatalf("error typechecking package %s: %s (%s)", name, err, ctxt)
	}

	if usePkgCache {
		pkgCache[key] = pkg
	}

	w.imported[name] = pkg
	return
}
Example #13
func TranslatePackage(importPath string, files []*ast.File, fileSet *token.FileSet, config *types.Config) ([]byte, error) {
	info := &types.Info{
		Types:      make(map[ast.Expr]types.Type),
		Values:     make(map[ast.Expr]exact.Value),
		Objects:    make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}

	var errList ErrorList
	var previousErr error
	config.Error = func(err error) {
		if previousErr != nil && previousErr.Error() == err.Error() {
			return
		}
		errList = append(errList, err)
		previousErr = err
	}
	config.Sizes = sizes32
	typesPkg, err := config.Check(importPath, fileSet, files, info)
	if errList != nil {
		return nil, errList
	}
	if err != nil {
		return nil, err
	}
	config.Packages[importPath] = typesPkg

	c := &PkgContext{
		pkg:          typesPkg,
		info:         info,
		pkgVars:      make(map[string]string),
		objectVars:   make(map[types.Object]string),
		allVarNames:  make(map[string]int),
		postLoopStmt: make(map[string]ast.Stmt),
		positions:    make(map[int]token.Pos),
	}
	for _, name := range ReservedKeywords {
		c.allVarNames[name] = 1
	}

	functionsByType := make(map[types.Type][]*ast.FuncDecl)
	functionsByObject := make(map[types.Object]*ast.FuncDecl)
	var initStmts []ast.Stmt
	var typeSpecs []*ast.TypeSpec
	var constSpecs []*ast.ValueSpec
	var varSpecs []*ast.ValueSpec
	for _, file := range files {
		for _, decl := range file.Decls {
			switch d := decl.(type) {
			case *ast.FuncDecl:
				sig := c.info.Objects[d.Name].(*types.Func).Type().(*types.Signature)
				var recvType types.Type
				if sig.Recv() != nil {
					recvType = sig.Recv().Type()
					if ptr, isPtr := recvType.(*types.Pointer); isPtr {
						recvType = ptr.Elem()
					}
				}
				if sig.Recv() == nil && d.Name.Name == "init" {
					initStmts = append(initStmts, d.Body.List...)
					continue
				}
				functionsByType[recvType] = append(functionsByType[recvType], d)
				o := c.info.Objects[d.Name]
				functionsByObject[o] = d
				if sig.Recv() == nil {
					c.objectName(o) // register toplevel name
				}
			case *ast.GenDecl:
				switch d.Tok {
				case token.TYPE:
					for _, spec := range d.Specs {
						s := spec.(*ast.TypeSpec)
						typeSpecs = append(typeSpecs, s)
						c.objectName(c.info.Objects[s.Name]) // register toplevel name
					}
				case token.CONST:
					for _, spec := range d.Specs {
						s := spec.(*ast.ValueSpec)
						constSpecs = append(constSpecs, s)
						for _, name := range s.Names {
							if !isBlank(name) {
								c.objectName(c.info.Objects[name]) // register toplevel name
							}
						}
					}
				case token.VAR:
					for _, spec := range d.Specs {
						s := spec.(*ast.ValueSpec)
						varSpecs = append(varSpecs, s)
						for _, name := range s.Names {
							if !isBlank(name) {
								c.objectName(c.info.Objects[name]) // register toplevel name
							}
						}
					}
				}
			}
		}
	}

	// resolve var dependencies
	var unorderedSingleVarSpecs []*ast.ValueSpec
	pendingObjects := make(map[types.Object]bool)
	for _, spec := range varSpecs {
		for _, singleSpec := range c.splitValueSpec(spec) {
			if singleSpec.Values[0] == nil {
				continue
			}
			unorderedSingleVarSpecs = append(unorderedSingleVarSpecs, singleSpec)
			for _, name := range singleSpec.Names {
				pendingObjects[c.info.Objects[name]] = true
			}
		}
	}
	complete := false
	var intVarStmts []ast.Stmt
	for !complete {
		complete = true
		for i, spec := range unorderedSingleVarSpecs {
			if spec == nil {
				continue
			}
			v := DependencyCollector{info: c.info, functions: functionsByObject}
			ast.Walk(&v, spec.Values[0])
			currentObjs := make(map[types.Object]bool)
			for _, name := range spec.Names {
				currentObjs[c.info.Objects[name]] = true
			}
			ready := true
			for _, dep := range v.dependencies {
				if currentObjs[dep] {
					return nil, fmt.Errorf("%s: initialization loop", fileSet.Position(dep.Pos()).String())
				}
				ready = ready && !pendingObjects[dep]
			}
			if !ready {
				complete = false
				continue
			}
			lhs := make([]ast.Expr, len(spec.Names))
			for i, name := range spec.Names {
				lhs[i] = name
				delete(pendingObjects, c.info.Objects[name])
			}
			intVarStmts = append(intVarStmts, &ast.AssignStmt{
				Lhs: lhs,
				Tok: token.DEFINE,
				Rhs: spec.Values,
			})
			unorderedSingleVarSpecs[i] = nil
		}
	}

	c.Indent(func() {
		for _, importedPkg := range typesPkg.Imports() {
			varName := c.newVariable(importedPkg.Name())
			c.Printf(`var %s = Go$packages["%s"];`, varName, importedPkg.Path())
			c.pkgVars[importedPkg.Path()] = varName
		}

		// types and their functions
		for _, spec := range typeSpecs {
			obj := c.info.Objects[spec.Name]
			typeName := c.objectName(obj)
			c.Printf("var %s;", typeName)
			c.translateTypeSpec(spec)
			for _, fun := range functionsByType[obj.Type()] {
				_, isStruct := obj.Type().Underlying().(*types.Struct)
				c.translateMethod(typeName, isStruct, fun)
			}
			c.Printf("Go$pkg.%s = %s;", typeName, typeName)
		}

		// package functions
		for _, fun := range functionsByType[nil] {
			if isBlank(fun.Name) {
				continue
			}
			c.newScope(func() {
				name := c.objectName(c.info.Objects[fun.Name])
				params := c.translateParams(fun.Type)
				c.Printf("var %s = function(%s) {", name, strings.Join(params, ", "))
				c.Indent(func() {
					jsCode, _ := typesPkg.Scope().Lookup("js_" + name).(*types.Const)
					if jsCode != nil {
						c.Write([]byte(exact.StringVal(jsCode.Val())))
						c.Write([]byte{'\n'})
						return
					}
					if fun.Body == nil {
						c.Printf(`throw new Go$Panic("Native function not implemented: %s");`, name)
						return
					}

					c.translateFunctionBody(fun.Body.List, c.info.Objects[fun.Name].Type().(*types.Signature))
				})
				c.Printf("};")
			})
		}

		// constants
		for _, spec := range constSpecs {
			for _, name := range spec.Names {
				if isBlank(name) || strings.HasPrefix(name.Name, "js_") {
					continue
				}
				o := c.info.Objects[name].(*types.Const)
				c.info.Types[name] = o.Type()
				c.info.Values[name] = o.Val()
				c.Printf("%s = %s;", c.objectName(o), c.translateExpr(name))
			}
		}

		// variables
		for _, spec := range varSpecs {
			for _, name := range spec.Names {
				o := c.info.Objects[name].(*types.Var)
				c.Printf("%s = %s;", c.objectName(o), c.zeroValue(o.Type()))
			}
		}

		// native implementations
		if native, hasNative := natives[importPath]; hasNative {
			c.Write([]byte(strings.TrimSpace(native)))
			c.Write([]byte{'\n'})
		}

		// exports for package functions
		for _, fun := range functionsByType[nil] {
			name := fun.Name.Name
			if fun.Name.IsExported() || name == "main" {
				c.Printf("Go$pkg.%s = %s;", name, name)
			}
		}

		// init function
		c.Printf("Go$pkg.init = function() {")
		c.Indent(func() {
			c.translateFunctionBody(append(intVarStmts, initStmts...), nil)
		})
		c.Printf("};")
	})

	return c.output, nil
}
Example #14
func (p *Parser) importSourcePackage(fs *token.FileSet, path string, importPath string, unsaved map[string]UnsavedDocument, options Options, info *types.Info) (*ast.File, []*ast.File, *types.Package, error) {
	ctx := build.Default

	if len(options.GoPath) > 0 {
		ctx.GOPATH = options.GoPath
	}

	ctx.BuildTags = options.BuildConstraints

	var pkg *build.Package
	var err error

	if filepath.IsAbs(importPath) {
		pkg, err = ctx.ImportDir(importPath, 0)
	} else {
		pkg, err = ctx.Import(importPath, "", 0)
	}

	if err != nil {
		return nil, nil, nil, err
	}

	files := make([]string, len(pkg.GoFiles))

	for i, v := range pkg.GoFiles {
		files[i] = filepath.Join(pkg.Dir, v)
	}

	f, astf, err := p.Ast(fs, path, files, unsaved)

	if len(astf) == 0 {
		return nil, nil, nil, err
	}

	errors, _ := err.(scanner.ErrorList)

	c := types.Config{
		Error: func(err error) {
			if e, ok := err.(types.Error); ok && len(path) != 0 {
				pos := e.Fset.Position(e.Pos)

				if pos.Filename == path {
					errors.Add(pos, e.Msg)
				}
			}
		},

		Import: func(imports map[string]*types.Package, path string) (*types.Package, error) {
			return p.importSourceFirst(imports, fs, path, unsaved, options, info)
		},
	}

	tpkg, _ := c.Check(filepath.Base(importPath), fs, astf, info)

	errors.RemoveMultiples()
	errors.Sort()

	if len(errors) == 0 {
		err = nil
	} else {
		err = errors
	}

	return f, astf, tpkg, err
}