rm CondaPkg environment

This commit is contained in:
ton
2023-04-06 13:53:47 +07:00
parent 0a57ed7884
commit c43d949309
3329 changed files with 5725 additions and 447022 deletions

View File

@@ -17,3 +17,4 @@ from .second_order import *
from .subgraph_alg import *
from .trophic import *
from .voterank_alg import *
from .laplacian import *

View File

@@ -224,7 +224,7 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=No
if k is None:
nodes = G
else:
nodes = seed.sample(G.nodes(), k)
nodes = seed.sample(list(G.nodes()), k)
for s in nodes:
# single source shortest paths
if weight is None: # use BFS

View File

@@ -1,5 +1,7 @@
"""Betweenness centrality measures for subsets of nodes."""
from networkx.algorithms.centrality.betweenness import _add_edge_keys
from networkx.algorithms.centrality.betweenness import (
_add_edge_keys,
)
from networkx.algorithms.centrality.betweenness import (
_single_source_dijkstra_path_basic as dijkstra,
)

View File

@@ -59,7 +59,7 @@ def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
# all possible ties of connections that u and b share
possib = combinations(ST, 2)
total = 0
for (s, t) in possib:
for s, t in possib:
# neighbors of s that are in G_u, not including u and v
nbrs_s = u_nbrs.intersection(G_u[s]) - set_uv
# s and t are not directly connected

View File

@@ -207,7 +207,7 @@ def _group_preprocessing(G, set_v, weight):
else: # use Dijkstra's algorithm
S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
for i in delta[s].keys(): # add the paths from s to i and rescale sigma
for i in delta[s]: # add the paths from s to i and rescale sigma
if s != i:
delta[s][i] += 1
if weight is not None:
@@ -414,7 +414,7 @@ def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC:
return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"]
# stopping condition - if the size of group members equal to k or there are less than
# k - |GM| in the candidate list or the heuristic function plus the GBC is bellow the
# k - |GM| in the candidate list or the heuristic function plus the GBC is below the
# maximal GBC found then prune
if (
len(DF_tree.nodes[root]["GM"]) == k
@@ -682,7 +682,7 @@ def group_degree_centrality(G, S):
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
http://www.analytictech.com/borgatti/group_centrality.htm
"""
centrality = len(set().union(*list(set(G.neighbors(i)) for i in S)) - set(S))
centrality = len(set().union(*[set(G.neighbors(i)) for i in S]) - set(S))
centrality /= len(G.nodes()) - len(S)
return centrality

View File

@@ -0,0 +1,136 @@
"""
Laplacian centrality measures.
"""
import networkx as nx
__all__ = ["laplacian_centrality"]
def laplacian_centrality(
    G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
):
    r"""Compute the Laplacian centrality for nodes in the graph `G`.

    The Laplacian Centrality of a node ``i`` is measured by the drop in the
    Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
    is the sum of the squared eigenvalues of a graph's Laplacian matrix.

    .. math::

        C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}

        E_L (G) = \sum_{i=0}^n \lambda_i^2

    Where $E_L (G)$ is the Laplacian energy of graph `G`,
    E_L (G_i) is the Laplacian energy of graph `G` after deleting node ``i``
    and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
    This formula shows the normalized value. Without normalization,
    the numerator on the right side is returned.

    Parameters
    ----------
    G : graph
        A networkx graph

    normalized : bool (default = True)
        If True the centrality score is scaled so the sum over all nodes is 1.
        If False the centrality score for each node is the drop in Laplacian
        energy when that node is removed.

    nodelist : list, optional (default = None)
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight: string or None, optional (default=`weight`)
        Optional parameter `weight` to compute the Laplacian matrix.
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    walk_type : string or None, optional (default=None)
        Optional parameter `walk_type` used when calling
        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
        If None, the transition matrix is selected depending on the properties
        of the graph. Otherwise can be `random`, `lazy`, or `pagerank`.

    alpha : real (default = 0.95)
        Optional parameter `alpha` used when calling
        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
        (1 - alpha) is the teleportation probability used with pagerank.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with Laplacian centrality as the value.

    Examples
    --------
    >>> G = nx.Graph()
    >>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
    >>> G.add_weighted_edges_from(edges)
    >>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
    [(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]

    Notes
    -----
    The algorithm is implemented based on [1]_ with an extension to directed graphs
    using the ``directed_laplacian_matrix`` function.

    Raises
    ------
    NetworkXPointlessConcept
        If the graph `G` is the null graph.

    References
    ----------
    .. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
       Laplacian centrality: A new centrality measure for weighted networks.
       Information Sciences, 194:240-253.
       https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf

    See Also
    --------
    directed_laplacian_matrix
    laplacian_matrix
    """
    import numpy as np
    import scipy as sp
    import scipy.linalg  # call as sp.linalg

    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
    # Idiom fix: compare against None with `is not`, never `!=` (PEP 8;
    # `!=` can invoke an arbitrary __eq__ on list-like nodelist objects).
    if nodelist is not None:
        nodeset = set(G.nbunch_iter(nodelist))
        if len(nodeset) != len(nodelist):
            raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
        # Requested nodes first, remaining graph nodes after, so the matrix
        # rows/columns line up with the iteration order below.
        nodes = nodelist + [n for n in G if n not in nodeset]
    else:
        nodelist = nodes = list(G)

    if G.is_directed():
        lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
    else:
        lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()

    # Total Laplacian energy: sum of squared eigenvalues of the full matrix.
    full_energy = np.power(sp.linalg.eigh(lap_matrix, eigvals_only=True), 2).sum()

    # calculate laplacian centrality
    laplace_centralities_dict = {}
    for i, node in enumerate(nodelist):
        # remove row and col i from lap_matrix
        # (plain range() instead of np.arange() — same indices, no numpy scalars)
        all_but_i = list(range(lap_matrix.shape[0]))
        all_but_i.remove(i)
        A_2 = lap_matrix[all_but_i, :][:, all_but_i]

        # Adjust diagonal for removed row
        new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
        np.fill_diagonal(A_2, new_diag[all_but_i])

        # Energy drop caused by deleting node i.
        new_energy = np.power(sp.linalg.eigh(A_2, eigvals_only=True), 2).sum()
        lapl_cent = full_energy - new_energy
        if normalized:
            lapl_cent = lapl_cent / full_energy

        laplace_centralities_dict[node] = lapl_cent

    return laplace_centralities_dict

View File

@@ -9,7 +9,6 @@ import networkx as nx
class TestDegreeCentrality:
def setup_method(self):
self.K = nx.krackhardt_kite_graph()
self.P3 = nx.path_graph(3)
self.K5 = nx.complete_graph(5)

View File

@@ -128,25 +128,25 @@ class TestEigenvectorCentralityDirected:
def test_eigenvector_centrality_weighted(self):
G = self.G
p = nx.eigenvector_centrality(G)
for (a, b) in zip(list(p.values()), self.G.evc):
for a, b in zip(list(p.values()), self.G.evc):
assert a == pytest.approx(b, abs=1e-4)
def test_eigenvector_centrality_weighted_numpy(self):
G = self.G
p = nx.eigenvector_centrality_numpy(G)
for (a, b) in zip(list(p.values()), self.G.evc):
for a, b in zip(list(p.values()), self.G.evc):
assert a == pytest.approx(b, abs=1e-7)
def test_eigenvector_centrality_unweighted(self):
G = self.H
p = nx.eigenvector_centrality(G)
for (a, b) in zip(list(p.values()), self.G.evc):
for a, b in zip(list(p.values()), self.G.evc):
assert a == pytest.approx(b, abs=1e-4)
def test_eigenvector_centrality_unweighted_numpy(self):
G = self.H
p = nx.eigenvector_centrality_numpy(G)
for (a, b) in zip(list(p.values()), self.G.evc):
for a, b in zip(list(p.values()), self.G.evc):
assert a == pytest.approx(b, abs=1e-7)

View File

@@ -295,14 +295,14 @@ class TestKatzCentralityDirected:
G = self.G
alpha = self.G.alpha
p = nx.katz_centrality(G, alpha, weight="weight")
for (a, b) in zip(list(p.values()), self.G.evc):
for a, b in zip(list(p.values()), self.G.evc):
assert a == pytest.approx(b, abs=1e-7)
def test_katz_centrality_unweighted(self):
H = self.H
alpha = self.H.alpha
p = nx.katz_centrality(H, alpha, weight="weight")
for (a, b) in zip(list(p.values()), self.H.evc):
for a, b in zip(list(p.values()), self.H.evc):
assert a == pytest.approx(b, abs=1e-7)
@@ -318,14 +318,14 @@ class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected):
G = self.G
alpha = self.G.alpha
p = nx.katz_centrality_numpy(G, alpha, weight="weight")
for (a, b) in zip(list(p.values()), self.G.evc):
for a, b in zip(list(p.values()), self.G.evc):
assert a == pytest.approx(b, abs=1e-7)
def test_katz_centrality_unweighted(self):
H = self.H
alpha = self.H.alpha
p = nx.katz_centrality_numpy(H, alpha, weight="weight")
for (a, b) in zip(list(p.values()), self.H.evc):
for a, b in zip(list(p.values()), self.H.evc):
assert a == pytest.approx(b, abs=1e-7)

View File

@@ -0,0 +1,189 @@
import pytest
import networkx as nx
np = pytest.importorskip("numpy")
sp = pytest.importorskip("scipy")
def test_laplacian_centrality_E():
    """Weighted/unweighted, normalized/raw scores on a small 6-node graph."""
    E = nx.Graph()
    E.add_weighted_edges_from(
        [(0, 1, 4), (4, 5, 1), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2)]
    )
    d = nx.laplacian_centrality(E)
    exact = {
        0: 0.700000,
        1: 0.900000,
        2: 0.280000,
        3: 0.220000,
        4: 0.260000,
        5: 0.040000,
    }
    for n, dc in d.items():
        assert exact[n] == pytest.approx(dc, abs=1e-7)

    # Check not normalized: raw scores are normalized * total Laplacian energy.
    full_energy = 200
    dnn = nx.laplacian_centrality(E, normalized=False)
    for n, dc in dnn.items():
        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-7)

    # Check unweighted not-normalized version
    # (leftover debug print() removed — tests should not write to stdout)
    duw_nn = nx.laplacian_centrality(E, normalized=False, weight=None)
    exact_uw_nn = {
        0: 18,
        1: 34,
        2: 18,
        3: 10,
        4: 16,
        5: 6,
    }
    for n, dc in duw_nn.items():
        assert exact_uw_nn[n] == pytest.approx(dc, abs=1e-7)

    # Check unweighted version
    duw = nx.laplacian_centrality(E, weight=None)
    full_energy = 42
    for n, dc in duw.items():
        assert exact_uw_nn[n] / full_energy == pytest.approx(dc, abs=1e-7)
def test_laplacian_centrality_KC():
    """Normalized and raw Laplacian centrality on the karate-club graph."""
    graph = nx.karate_club_graph()
    expected = {
        0: 0.2543593,
        1: 0.1724524,
        2: 0.2166053,
        3: 0.0964646,
        4: 0.0350344,
        5: 0.0571109,
        6: 0.0540713,
        7: 0.0788674,
        8: 0.1222204,
        9: 0.0217565,
        10: 0.0308751,
        11: 0.0215965,
        12: 0.0174372,
        13: 0.118861,
        14: 0.0366341,
        15: 0.0548712,
        16: 0.0172772,
        17: 0.0191969,
        18: 0.0225564,
        19: 0.0331147,
        20: 0.0279955,
        21: 0.0246361,
        22: 0.0382339,
        23: 0.1294193,
        24: 0.0227164,
        25: 0.0644697,
        26: 0.0281555,
        27: 0.075188,
        28: 0.0364742,
        29: 0.0707087,
        30: 0.0708687,
        31: 0.131019,
        32: 0.2370821,
        33: 0.3066709,
    }
    result = nx.laplacian_centrality(graph)
    for node, score in result.items():
        assert expected[node] == pytest.approx(score, abs=1e-7)

    # Raw (non-normalized) scores scale by the graph's total Laplacian energy.
    full_energy = 12502
    raw = nx.laplacian_centrality(graph, normalized=False)
    for node, score in raw.items():
        assert expected[node] * full_energy == pytest.approx(score, abs=1e-3)
def test_laplacian_centrality_K():
    """Normalized and raw Laplacian centrality on the Krackhardt kite."""
    graph = nx.krackhardt_kite_graph()
    expected = {
        0: 0.3010753,
        1: 0.3010753,
        2: 0.2258065,
        3: 0.483871,
        4: 0.2258065,
        5: 0.3870968,
        6: 0.3870968,
        7: 0.1935484,
        8: 0.0752688,
        9: 0.0322581,
    }
    result = nx.laplacian_centrality(graph)
    for node, score in result.items():
        assert expected[node] == pytest.approx(score, abs=1e-7)

    # Raw (non-normalized) scores scale by the graph's total Laplacian energy.
    full_energy = 186
    raw = nx.laplacian_centrality(graph, normalized=False)
    for node, score in raw.items():
        assert expected[node] * full_energy == pytest.approx(score, abs=1e-3)
def test_laplacian_centrality_P3():
    """Laplacian centrality of the 3-node path: endpoints 0.6, middle 1.0."""
    expected = {0: 0.6, 1: 1.0, 2: 0.6}
    result = nx.laplacian_centrality(nx.path_graph(3))
    for node, score in result.items():
        assert expected[node] == pytest.approx(score, abs=1e-7)
def test_laplacian_centrality_K5():
    """All nodes of K5 are symmetric, so every score is the same (0.52)."""
    expected = {node: 0.52 for node in range(5)}
    result = nx.laplacian_centrality(nx.complete_graph(5))
    for node, score in result.items():
        assert expected[node] == pytest.approx(score, abs=1e-7)
def test_laplacian_centrality_FF():
    """Laplacian centrality on the Florentine families graph (string nodes)."""
    expected = {
        "Acciaiuoli": 0.0804598,
        "Medici": 0.4022989,
        "Castellani": 0.1724138,
        "Peruzzi": 0.183908,
        "Strozzi": 0.2528736,
        "Barbadori": 0.137931,
        "Ridolfi": 0.2183908,
        "Tornabuoni": 0.2183908,
        "Albizzi": 0.1954023,
        "Salviati": 0.1149425,
        "Pazzi": 0.0344828,
        "Bischeri": 0.1954023,
        "Guadagni": 0.2298851,
        "Ginori": 0.045977,
        "Lamberteschi": 0.0574713,
    }
    result = nx.laplacian_centrality(nx.florentine_families_graph())
    for family, score in result.items():
        assert expected[family] == pytest.approx(score, abs=1e-7)
def test_laplacian_centrality_DG():
    """Directed-graph path: normalized and raw scores on a star-like DiGraph."""
    edges = [(0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 6), (5, 7), (5, 8)]
    graph = nx.DiGraph(edges)
    expected = {
        0: 0.2123352,
        5: 0.515391,
        1: 0.2123352,
        2: 0.2123352,
        3: 0.2123352,
        4: 0.2123352,
        6: 0.2952031,
        7: 0.2952031,
        8: 0.2952031,
    }
    result = nx.laplacian_centrality(graph)
    for node, score in result.items():
        assert expected[node] == pytest.approx(score, abs=1e-7)

    # Raw (non-normalized) scores scale by the directed Laplacian energy.
    full_energy = 9.50704
    raw = nx.laplacian_centrality(graph, normalized=False)
    for node, score in raw.items():
        assert expected[node] * full_energy == pytest.approx(score, abs=1e-4)

View File

@@ -6,7 +6,6 @@ import networkx as nx
class TestLoadCentrality:
@classmethod
def setup_class(cls):
G = nx.Graph()
G.add_edge(0, 1, weight=3)
G.add_edge(0, 2, weight=2)