// PreferentialAttachment constructs a graph in the destination, dst, of order n. // The graph is constructed successively starting from an m order graph with one // node having degree m-1. At each iteration of graph addition, one node is added // with m additional edges joining existing nodes with probability proportional // to the nodes' degrees. If src is not nil it is used as the random source, // otherwise rand.Float64 is used. // // The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452 // after 10.1126/science.286.5439.509. func PreferentialAttachment(dst graph.UndirectedBuilder, n, m int, src *rand.Rand) error { if n <= m { return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) } // Initial condition. wt := make([]float64, n) for u := 0; u < m; u++ { if !dst.Has(simple.Node(u)) { dst.AddNode(simple.Node(u)) } // We need to give equal probability for // adding the first generation of edges. wt[u] = 1 } ws := sample.NewWeighted(wt, src) for i := range wt { // These weights will organically grow // after the first growth iteration. wt[i] = 0 } // Growth. for v := m; v < n; v++ { for i := 0; i < m; i++ { // Preferential attachment. u, ok := ws.Take() if !ok { return errors.New("gen: depleted distribution") } dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) wt[u]++ wt[v]++ } ws.ReweightAll(wt) } return nil }
// NavigableSmallWorld constructs an N-dimensional grid with guaranteed local connectivity
// and random long-range connectivity in the destination, dst. The dims parameters specifies
// the length of each of the N dimensions, p defines the Manhattan distance between local
// nodes, and q defines the number of out-going long-range connections from each node. Long-
// range connections are made with a probability proportional to |d(u,v)|^-r where d is the
// Manhattan distance between non-local nodes.
//
// The algorithm is essentially as described on p4 of http://www.cs.cornell.edu/home/kleinber/swn.pdf.
func NavigableSmallWorld(dst GraphBuilder, dims []int, p, q int, r float64, src *rand.Rand) (err error) {
	// Validate the local distance, long-range link count and decay
	// constant before doing any work.
	if p < 1 {
		return fmt.Errorf("gen: bad local distance: p=%v", p)
	}
	if q < 0 {
		return fmt.Errorf("gen: bad distant link count: q=%v", q)
	}
	if r < 0 {
		return fmt.Errorf("gen: bad decay constant: r=%v", r)
	}

	// n is the total number of nodes in the grid: the product of all
	// dimension lengths.
	n := 1
	for _, d := range dims {
		n *= d
	}
	for i := 0; i < n; i++ {
		if !dst.Has(simple.Node(i)) {
			dst.AddNode(simple.Node(i))
		}
	}

	// Use the directed edge-existence check when dst is directed so
	// that both edge orientations can be considered separately below.
	hasEdge := dst.HasEdgeBetween
	d, isDirected := dst.(graph.Directed)
	if isDirected {
		hasEdge = d.HasEdgeFromTo
	}

	// Local connectivity: connect every node to all nodes within
	// Manhattan distance p. locality is the side length (p*2+1) of the
	// neighborhood box iterated around each node in each dimension.
	locality := make([]int, len(dims))
	for i := range locality {
		locality[i] = p*2 + 1
	}
	iterateOver(dims, func(u []int) {
		uid := idFrom(u, dims)
		iterateOver(locality, func(delta []int) {
			d := manhattanDelta(u, delta, dims, -p)
			// Skip the node itself and anything outside the
			// local distance p.
			if d == 0 || d > p {
				return
			}
			vid := idFromDelta(u, delta, dims, -p)
			e := simple.Edge{F: simple.Node(uid), T: simple.Node(vid), W: 1}
			// Canonicalize edge orientation by node ID.
			if uid > vid {
				e.F, e.T = e.T, e.F
			}
			if !hasEdge(e.From(), e.To()) {
				dst.SetEdge(e)
			}
			if !isDirected {
				return
			}
			// For directed graphs also add the reverse edge so
			// local connectivity is bidirectional.
			e.F, e.T = e.T, e.F
			if !hasEdge(e.From(), e.To()) {
				dst.SetEdge(e)
			}
		})
	})

	// The long-range sampling below runs inside iterateOver closures
	// and signals a depleted distribution by panicking; convert that
	// specific panic into an error return, re-panicking on anything
	// unexpected.
	defer func() {
		r := recover()
		if r != nil {
			if r != "depleted distribution" {
				panic(r)
			}
			err = errors.New("depleted distribution")
		}
	}()

	// Long-range connectivity: for each node u, draw q distant targets
	// with probability proportional to d(u,v)^-r. The weight slice w is
	// reused (and zeroed) across source nodes.
	w := make([]float64, n)
	ws := sample.NewWeighted(w, src)
	iterateOver(dims, func(u []int) {
		uid := idFrom(u, dims)
		iterateOver(dims, func(v []int) {
			d := manhattanBetween(u, v)
			// Only non-local nodes (further than p) are
			// candidates for long-range edges.
			if d <= p {
				return
			}
			w[idFrom(v, dims)] = math.Pow(float64(d), -r)
		})
		ws.ReweightAll(w)
		for i := 0; i < q; i++ {
			vid, ok := ws.Take()
			if !ok {
				panic("depleted distribution")
			}
			e := simple.Edge{F: simple.Node(uid), T: simple.Node(vid), W: 1}
			// Undirected edges are stored in canonical
			// low-to-high ID orientation.
			if !isDirected && uid > vid {
				e.F, e.T = e.T, e.F
			}
			if !hasEdge(e.From(), e.To()) {
				dst.SetEdge(e)
			}
		}
		// Reset the weights for the next source node.
		for i := range w {
			w[i] = 0
		}
	})

	return nil
}
// TunableClusteringScaleFree constructs a graph in the destination, dst, of order n. // The graph is constructed successively starting from an m order graph with one node // having degree m-1. At each iteration of graph addition, one node is added with m // additional edges joining existing nodes with probability proportional to the nodes' // degrees. The edges are formed as a triad with probability, p. // If src is not nil it is used as the random source, otherwise rand.Float64 and // rand.Intn are used. // // The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452. func TunableClusteringScaleFree(dst graph.UndirectedBuilder, n, m int, p float64, src *rand.Rand) error { if p < 0 || p > 1 { return fmt.Errorf("gen: bad probability: p=%v", p) } if n <= m { return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) } var ( rnd func() float64 rndN func(int) int ) if src == nil { rnd = rand.Float64 rndN = rand.Intn } else { rnd = src.Float64 rndN = src.Intn } // Initial condition. wt := make([]float64, n) for u := 0; u < m; u++ { if !dst.Has(simple.Node(u)) { dst.AddNode(simple.Node(u)) } // We need to give equal probability for // adding the first generation of edges. wt[u] = 1 } ws := sample.NewWeighted(wt, src) for i := range wt { // These weights will organically grow // after the first growth iteration. wt[i] = 0 } // Growth. for v := m; v < n; v++ { var u int pa: for i := 0; i < m; i++ { // Triad formation. if i != 0 && rnd() < p { for _, w := range permute(dst.From(simple.Node(u)), rndN) { wid := w.ID() if wid == v || dst.HasEdgeBetween(w, simple.Node(v)) { continue } dst.SetEdge(simple.Edge{F: w, T: simple.Node(v), W: 1}) wt[wid]++ wt[v]++ continue pa } } // Preferential attachment. 
for { var ok bool u, ok = ws.Take() if !ok { return errors.New("gen: depleted distribution") } if u == v || dst.HasEdgeBetween(simple.Node(u), simple.Node(v)) { continue } dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) wt[u]++ wt[v]++ break } } ws.ReweightAll(wt) } return nil }