// TarjanSCC returns the strongly connected components of the graph g using Tarjan's algorithm.
//
// A strongly connected component of a graph is a set of vertices where it's possible to reach any
// vertex in the set from any other (meaning there's a cycle between them.)
//
// Generally speaking, a directed graph where the number of strongly connected components is equal
// to the number of nodes is acyclic, unless you count reflexive edges as a cycle (which requires
// only a little extra testing.)
//
func TarjanSCC(g graph.Directed) [][]graph.Node {
	nodes := g.Nodes()
	t := tarjan{
		succ: g.From,

		indexTable: make(map[int]int, len(nodes)),
		lowLink:    make(map[int]int, len(nodes)),
		onStack:    make(internal.IntSet, len(nodes)),
	}
	for _, v := range nodes {
		if t.indexTable[v.ID()] == 0 {
			t.strongconnect(v)
		}
	}
	return t.sccs
}
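
// The function below is an illustrative sketch, not part of the original source:
// it shows one way the result of TarjanSCC might be consumed, using only the
// graph.Directed and graph.Node interfaces already relied on above plus the
// standard library "fmt" package. The helper name printSCCs is hypothetical.
func printSCCs(g graph.Directed) {
	sccs := TarjanSCC(g)
	for i, scc := range sccs {
		ids := make([]int, 0, len(scc))
		for _, n := range scc {
			ids = append(ids, n.ID())
		}
		fmt.Printf("component %d: %v\n", i, ids)
	}
	// As noted in the TarjanSCC documentation, a component count equal to the
	// node count means the graph is acyclic apart from any self-loops.
	if len(sccs) == len(g.Nodes()) {
		fmt.Println("graph is acyclic (ignoring self-loops)")
	}
}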
// PageRankSparse returns the PageRank weights for nodes of the sparse directed
// graph g using the given damping factor and terminating when the 2-norm of the
// vector difference between iterations is below tol. The returned map is
// keyed on the graph node IDs.
func PageRankSparse(g graph.Directed, damp, tol float64) map[int]float64 {
	// PageRankSparse is implemented according to "How Google Finds Your Needle
	// in the Web's Haystack".
	//
	// G.I^k = alpha.H.I^k + alpha.A.I^k + (1-alpha).1/n.1.I^k
	//
	// http://www.ams.org/samplings/feature-column/fcarc-pagerank

	nodes := g.Nodes()
	indexOf := make(map[int]int, len(nodes))
	for i, n := range nodes {
		indexOf[n.ID()] = i
	}

	m := make(rowCompressedMatrix, len(nodes))
	var dangling compressedRow
	df := damp / float64(len(nodes))
	for j, u := range nodes {
		to := g.From(u)
		f := damp / float64(len(to))
		for _, v := range to {
			m.addTo(indexOf[v.ID()], j, f)
		}
		if len(to) == 0 {
			dangling.addTo(j, df)
		}
	}

	last := make([]float64, len(nodes))
	for i := range last {
		last[i] = 1
	}
	lastV := mat64.NewVector(len(nodes), last)

	vec := make([]float64, len(nodes))
	var sum float64
	for i := range vec {
		r := rand.NormFloat64()
		sum += r
		vec[i] = r
	}
	f := 1 / sum
	for i := range vec {
		vec[i] *= f
	}
	v := mat64.NewVector(len(nodes), vec)

	dt := (1 - damp) / float64(len(nodes))
	for {
		lastV, v = v, lastV

		m.mulVecUnitary(v, lastV)          // First term of the G matrix equation;
		with := dangling.dotUnitary(lastV) // Second term;
		away := onesDotUnitary(dt, lastV)  // Last term.

		floats.AddConst(with+away, v.RawVector().Data)

		if normDiff(vec, last) < tol {
			break
		}
	}

	ranks := make(map[int]float64, len(nodes))
	for i, r := range v.RawVector().Data {
		ranks[nodes[i].ID()] = r
	}
	return ranks
}
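
// The function below is an illustrative sketch, not part of the original source:
// it shows a typical call to PageRankSparse with the conventional damping factor
// of 0.85 and a small tolerance, printing the weights keyed by node ID. The
// helper name printSparseRanks is hypothetical and a "fmt" import is assumed.
func printSparseRanks(g graph.Directed) {
	const (
		damp = 0.85 // Conventional damping factor from the PageRank literature.
		tol  = 1e-8 // Stop when the 2-norm of successive differences falls below this.
	)
	for id, r := range PageRankSparse(g, damp, tol) {
		fmt.Printf("node %d: %.6f\n", id, r)
	}
}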
// PageRank returns the PageRank weights for nodes of the directed graph g
// using the given damping factor and terminating when the 2-norm of the
// vector difference between iterations is below tol. The returned map is
// keyed on the graph node IDs.
func PageRank(g graph.Directed, damp, tol float64) map[int]float64 {
	// PageRank is implemented according to "How Google Finds Your Needle
	// in the Web's Haystack".
	//
	// G.I^k = alpha.S.I^k + (1-alpha).1/n.1.I^k
	//
	// http://www.ams.org/samplings/feature-column/fcarc-pagerank

	nodes := g.Nodes()
	indexOf := make(map[int]int, len(nodes))
	for i, n := range nodes {
		indexOf[n.ID()] = i
	}

	// Build the damped stochastic matrix alpha.S column by column: column j
	// holds damp/outdegree(j) for each node that j links to, and dangling
	// nodes (no out-links) are spread uniformly over all nodes.
	m := mat64.NewDense(len(nodes), len(nodes), nil)
	dangling := damp / float64(len(nodes))
	for j, u := range nodes {
		to := g.From(u)
		f := damp / float64(len(to))
		for _, v := range to {
			m.Set(indexOf[v.ID()], j, f)
		}
		if len(to) == 0 {
			for i := range nodes {
				m.Set(i, j, dangling)
			}
		}
	}

	// Add the teleportation term (1-alpha)/n to every entry, completing the
	// Google matrix G.
	mat := m.RawMatrix().Data
	dt := (1 - damp) / float64(len(nodes))
	for i := range mat {
		mat[i] += dt
	}

	last := make([]float64, len(nodes))
	for i := range last {
		last[i] = 1
	}
	lastV := mat64.NewVector(len(nodes), last)

	// Start from a random vector scaled so that its elements sum to 1.
	vec := make([]float64, len(nodes))
	var sum float64
	for i := range vec {
		r := rand.NormFloat64()
		sum += r
		vec[i] = r
	}
	f := 1 / sum
	for i := range vec {
		vec[i] *= f
	}
	v := mat64.NewVector(len(nodes), vec)

	// Power iteration: repeatedly apply G until successive iterates differ
	// by less than tol in the 2-norm.
	for {
		lastV, v = v, lastV
		v.MulVec(m, lastV)

		if normDiff(vec, last) < tol {
			break
		}
	}

	ranks := make(map[int]float64, len(nodes))
	for i, r := range v.RawVector().Data {
		ranks[nodes[i].ID()] = r
	}
	return ranks
}
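
// The function below is an illustrative sketch, not part of the original source:
// it orders node IDs by descending PageRank weight, assuming only the standard
// library "sort" package in addition to the code above. The helper name
// rankedIDs is hypothetical.
func rankedIDs(g graph.Directed, damp, tol float64) []int {
	ranks := PageRank(g, damp, tol)
	ids := make([]int, 0, len(ranks))
	for id := range ranks {
		ids = append(ids, id)
	}
	// Highest-weight nodes first.
	sort.Slice(ids, func(i, j int) bool { return ranks[ids[i]] > ranks[ids[j]] })
	return ids
}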
// HITS returns the Hyperlink-Induced Topic Search hub-authority scores for
// nodes of the directed graph g. HITS terminates when the 2-norm of the
// vector difference between iterations is below tol. The returned map is
// keyed on the graph node IDs.
func HITS(g graph.Directed, tol float64) map[int]HubAuthority {
	nodes := g.Nodes()

	// Make a topological copy of g with dense node IDs.
	indexOf := make(map[int]int, len(nodes))
	for i, n := range nodes {
		indexOf[n.ID()] = i
	}
	nodesLinkingTo := make([][]int, len(nodes))
	nodesLinkedFrom := make([][]int, len(nodes))
	for i, n := range nodes {
		for _, u := range g.To(n) {
			nodesLinkingTo[i] = append(nodesLinkingTo[i], indexOf[u.ID()])
		}
		for _, v := range g.From(n) {
			nodesLinkedFrom[i] = append(nodesLinkedFrom[i], indexOf[v.ID()])
		}
	}
	indexOf = nil

	w := make([]float64, 4*len(nodes))
	auth := w[:len(nodes)]
	hub := w[len(nodes) : 2*len(nodes)]
	for i := range nodes {
		auth[i] = 1
		hub[i] = 1
	}
	deltaAuth := w[2*len(nodes) : 3*len(nodes)]
	deltaHub := w[3*len(nodes):]

	var norm float64
	for {
		norm = 0
		for v := range nodes {
			var a float64
			for _, u := range nodesLinkingTo[v] {
				a += hub[u]
			}
			deltaAuth[v] = auth[v]
			auth[v] = a
			norm += a * a
		}
		norm = math.Sqrt(norm)

		for i := range auth {
			auth[i] /= norm
			deltaAuth[i] -= auth[i]
		}

		norm = 0
		for u := range nodes {
			var h float64
			for _, v := range nodesLinkedFrom[u] {
				h += auth[v]
			}
			deltaHub[u] = hub[u]
			hub[u] = h
			norm += h * h
		}
		norm = math.Sqrt(norm)

		for i := range hub {
			hub[i] /= norm
			deltaHub[i] -= hub[i]
		}

		if floats.Norm(deltaAuth, 2) < tol && floats.Norm(deltaHub, 2) < tol {
			break
		}
	}

	hubAuth := make(map[int]HubAuthority, len(nodes))
	for i, n := range nodes {
		hubAuth[n.ID()] = HubAuthority{Hub: hub[i], Authority: auth[i]}
	}

	return hubAuth
}
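
// The function below is an illustrative sketch, not part of the original source:
// it picks out the node IDs with the highest hub and authority scores from the
// map returned by HITS. The helper name bestHubAuthority is hypothetical, and it
// assumes g has at least one node.
func bestHubAuthority(g graph.Directed, tol float64) (bestHub, bestAuth int) {
	scores := HITS(g, tol)
	var maxHub, maxAuth float64
	for id, ha := range scores {
		if ha.Hub > maxHub {
			maxHub, bestHub = ha.Hub, id
		}
		if ha.Authority > maxAuth {
			maxAuth, bestAuth = ha.Authority, id
		}
	}
	return bestHub, bestAuth
}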