// Set sets the map entry for key to value,
// and returns the previous entry, if any.
func (m *M) Set(key types.Type, value interface{}) (prev interface{}) {
	if m.table != nil {
		hash := m.hasher.Hash(key)
		bucket := m.table[hash]
		var hole *entry
		for i, e := range bucket {
			if e.Key == nil {
				hole = &bucket[i]
			} else if types.IsIdentical(key, e.Key) {
				prev = e.value
				bucket[i].value = value
				return
			}
		}

		if hole != nil {
			*hole = entry{key, value} // overwrite deleted entry
		} else {
			m.table[hash] = append(bucket, entry{key, value})
		}
	} else {
		if m.hasher.memo == nil {
			m.hasher = MakeHasher()
		}
		hash := m.hasher.Hash(key)
		m.table = map[uint32][]entry{hash: {entry{key, value}}}
	}

	m.length++
	return
}
func checkEqualButNotIdentical(t *testing.T, x, y types.Type, comment string) {
	if !types.IsIdentical(x, y) {
		t.Errorf("%s: not equal: %s, %s", comment, x, y)
	}
	if x == y {
		t.Errorf("%s: identical: %v, %v", comment, x, y)
	}
}
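The helper above asserts that two types are structurally identical without being the same value. A hypothetical caller (not in the original tests) might look like the sketch below; it assumes the struct-literal style of the types API used elsewhere in this code (exported Slice.Elem). Two independently built []int types are identical per types.IsIdentical yet compare unequal with ==.

func TestSliceEqualButNotIdentical(t *testing.T) {
	// Two []int types constructed independently: structurally identical,
	// but distinct values, so x == y is false.
	x := &types.Slice{Elem: types.Typ[types.Int]}
	y := &types.Slice{Elem: types.Typ[types.Int]}
	checkEqualButNotIdentical(t, x, y, "independently built []int")
}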
func (n *copyNode) connectable(t types.Type, p *port) bool {
	var elem types.Type
	if dst, ok := underlying(n.dst.obj.Type).(*types.Slice); ok {
		elem = dst.Elem
	}
	switch src := underlying(n.src.obj.Type).(type) {
	case *types.Basic:
		elem = types.Typ[types.Byte]
	case *types.Slice:
		elem = src.Elem
	}
	switch t := underlying(t).(type) {
	case *types.Basic:
		return p == n.src && t.Info&types.IsString != 0 && (elem == nil || types.IsIdentical(elem, types.Typ[types.Byte]))
	case *types.Slice:
		return elem == nil || types.IsIdentical(t.Elem, elem)
	}
	return false
}
// At returns the map entry for the given key.
// The result is nil if the entry is not present.
//
func (m *M) At(key types.Type) interface{} {
	if m != nil && m.table != nil {
		for _, e := range m.table[m.hasher.Hash(key)] {
			if e.Key != nil && types.IsIdentical(key, e.Key) {
				return e.value
			}
		}
	}
	return nil
}
// Delete removes the entry with the given key, if any.
// It returns true if the entry was found.
//
func (m *M) Delete(key types.Type) bool {
	if m != nil && m.table != nil {
		hash := m.hasher.Hash(key)
		bucket := m.table[hash]
		for i, e := range bucket {
			if e.Key != nil && types.IsIdentical(key, e.Key) {
				// We can't compact the bucket as it
				// would disturb iterators.
				bucket[i] = entry{}
				m.length--
				return true
			}
		}
	}
	return false
}
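Taken together, Set, At, and Delete form the type-keyed map's public surface. The following is a minimal, hypothetical usage sketch (not part of the original source); it assumes M's zero value is ready to use, since Set lazily allocates the hasher and table as shown above, and that fmt and the types package are imported.

func exampleMapUsage() {
	var m M

	// Set returns the previous value stored for the key, if any.
	m.Set(types.Typ[types.Int], "first")
	prev := m.Set(types.Typ[types.Int], "second") // prev == "first"

	// At looks keys up by type identity (types.IsIdentical), not pointer equality.
	val := m.At(types.Typ[types.Int]) // val == "second"

	// Delete reports whether an entry was removed.
	found := m.Delete(types.Typ[types.Int]) // found == true

	fmt.Println(prev, val, found) // prints: first second true
}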
func (b browser) filteredObjs() (objs objects) {
	add := func(obj types.Object) {
		if invisible(obj, b.currentPkg) {
			return
		}
		if _, ok := obj.(*pkgObject); ok || b.options.objFilter == nil || b.options.objFilter(obj) {
			objs = append(objs, obj)
		}
	}

	addSubPkgs := func(importPath string) {
		seen := map[string]bool{}
		for _, srcDir := range build.Default.SrcDirs() {
			files, err := ioutil.ReadDir(filepath.Join(srcDir, importPath))
			if err != nil {
				continue
			}
			for _, f := range files {
				name := filepath.Base(f.Name())
				if !f.IsDir() || !unicode.IsLetter([]rune(name)[0]) || name == "testdata" || seen[name] {
					continue
				}
				if _, ok := b.newObj.(*pkgObject); ok && name == b.oldName {
					// when editing a package path, it will be added in filteredObjs as newObj, so don't add it here
					continue
				}
				seen[name] = true
				importPath := path.Join(importPath, name)
				pkgObj, ok := pkgObjects[importPath]
				if !ok {
					if pkg, err := build.Import(importPath, "", build.AllowBinary); err == nil {
						name = pkg.Name
					}
					pkgObj = &pkgObject{nil, path.Base(importPath), srcDir, importPath, name}
					pkgObjects[importPath] = pkgObj
				}
				add(pkgObj)
			}
		}
	}

	if b.typ != nil {
		mset := types.NewMethodSet(b.typ)
		for i := 0; i < mset.Len(); i++ {
			m := mset.At(i)
			// m.Type() has the correct receiver for inherited methods (m.Obj does not)
			add(types.NewFunc(0, m.Obj.GetPkg(), m.Obj.GetName(), m.Type().(*types.Signature)))
		}
		fset := types.NewFieldSet(b.typ)
		for i := 0; i < fset.Len(); i++ {
			f := fset.At(i)
			add(field{f.Obj.(*types.Var), f.Recv, f.Indirect})
		}
	} else if len(b.path) > 0 {
		switch obj := b.path[0].(type) {
		case *pkgObject:
			if pkg, err := getPackage(obj.importPath); err == nil {
				for _, obj := range pkg.Scope().Objects {
					add(obj)
				}
			} else {
				if _, ok := err.(*build.NoGoError); !ok {
					fmt.Println(err)
				}
				pkgs[obj.importPath] = types.NewPackage(obj.importPath, obj.pkgName, types.NewScope(types.Universe))
			}
			addSubPkgs(obj.importPath)
		case *types.TypeName:
			for _, m := range intuitiveMethodSet(obj.Type) {
				if types.IsIdentical(m.Obj.(*types.Func).Type.(*types.Signature).Recv.Type, m.Recv) {
					// preserve Object identity for non-inherited methods so that fluxObjs works
					add(m.Obj)
				} else {
					// m.Type() has the correct receiver for inherited methods (m.Obj does not)
					add(types.NewFunc(0, m.Obj.GetPkg(), m.Obj.GetName(), m.Type().(*types.Signature)))
				}
			}
		}
	} else {
		for _, name := range []string{"break", "call", "continue", "convert", "defer", "func", "go", "if", "loop", "return", "select", "typeAssert"} {
			add(special{newVar(name, nil)})
		}
		for _, name := range []string{"=", "*"} {
			add(newVar(name, nil))
		}
		pkgs := b.imports
		if b.currentPkg != nil {
			pkgs = append(pkgs, b.currentPkg)
		}
		for _, p := range pkgs {
			for _, obj := range p.Scope().Objects {
				add(obj)
			}
		}
		for _, obj := range types.Universe.Objects {
			switch obj.GetName() {
			case "nil", "print", "println":
				continue
			}
			add(obj)
		}
		for _, op := range []string{"!", "&&", "||", "+", "-", "*", "/", "%", "&", "|", "^", "&^", "<<", ">>", "==", "!=", "<", "<=", ">", ">=", "[]", "[:]", "<-"} {
			add(types.NewFunc(0, nil, op, nil))
		}
		for _, t := range []*types.TypeName{protoPointer, protoArray, protoSlice, protoMap, protoChan, protoFunc, protoInterface, protoStruct} {
			add(t)
		}
		addSubPkgs("")
	}
	sort.Sort(objs)
	return
}