Fixed database typo and removed unnecessary class identifier.
This commit is contained in:
parent
00ad49a143
commit
45fb349a7d
5098 changed files with 952558 additions and 85 deletions
86 venv/Lib/site-packages/networkx/algorithms/bipartite/__init__.py Normal file
@@ -0,0 +1,86 @@
r""" This module provides functions and operations for bipartite
graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
`E` that only connect nodes from opposite sets. It is common in the literature
to use a spatial analogy referring to the two node sets as top and bottom nodes.

The bipartite algorithms are not imported into the networkx namespace
at the top level so the easiest way to use them is with:

>>> from networkx.algorithms import bipartite

NetworkX does not have a custom bipartite graph class but the Graph()
or DiGraph() classes can be used to represent bipartite graphs. However,
you have to keep track of which set each node belongs to, and make
sure that there is no edge between nodes of the same set. The convention used
in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
identify the sets each node belongs to. This convention is not enforced in
the source code of bipartite functions, it's only a recommendation.

For example:

>>> B = nx.Graph()
>>> # Add nodes with the node attribute "bipartite"
>>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
>>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
>>> # Add edges only between nodes of opposite node sets
>>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])

Many algorithms of the bipartite module of NetworkX require, as an argument, a
container with all the nodes that belong to one set, in addition to the bipartite
graph `B`. The functions in the bipartite package do not check that the node set
is actually correct nor that the input graph is actually bipartite.
If `B` is connected, you can find the two node sets using a two-coloring
algorithm:

>>> nx.is_connected(B)
True
>>> bottom_nodes, top_nodes = bipartite.sets(B)

However, if the input graph is not connected, there is more than one possible
coloration. This is the reason why we require the user to pass a container
with all nodes of one bipartite node set as an argument to most bipartite
functions. In the face of ambiguity, we refuse the temptation to guess and
raise an :exc:`AmbiguousSolution <networkx.AmbiguousSolution>`
exception if the input graph for
:func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
is disconnected.

Using the `bipartite` node attribute, you can easily get the two node sets:

>>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
>>> bottom_nodes = set(B) - top_nodes

So you can easily use the bipartite algorithms that require, as an argument, a
container with all nodes that belong to one node set:

>>> print(round(bipartite.density(B, bottom_nodes), 2))
0.5
>>> G = bipartite.projected_graph(B, top_nodes)

All bipartite graph generators in NetworkX build bipartite graphs with the
`bipartite` node attribute. Thus, you can use the same approach:

>>> RB = bipartite.random_graph(5, 7, 0.2)
>>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
>>> RB_bottom = set(RB) - RB_top
>>> list(RB_top)
[0, 1, 2, 3, 4]
>>> list(RB_bottom)
[5, 6, 7, 8, 9, 10, 11]

For other bipartite graph generators see
:mod:`Generators <networkx.algorithms.bipartite.generators>`.

"""

from networkx.algorithms.bipartite.basic import *
from networkx.algorithms.bipartite.centrality import *
from networkx.algorithms.bipartite.cluster import *
from networkx.algorithms.bipartite.covering import *
from networkx.algorithms.bipartite.edgelist import *
from networkx.algorithms.bipartite.matching import *
from networkx.algorithms.bipartite.matrix import *
from networkx.algorithms.bipartite.projection import *
from networkx.algorithms.bipartite.redundancy import *
from networkx.algorithms.bipartite.spectral import *
from networkx.algorithms.bipartite.generators import *
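
A minimal sketch of the disconnected-graph behaviour described in the docstring above, assuming only the functions imported here (the tiny example graph is illustrative, not part of the file):

>>> import networkx as nx
>>> from networkx.algorithms import bipartite
>>> D = nx.Graph([(1, "a"), (2, "b")])  # two components: two-coloring is ambiguous
>>> nx.set_node_attributes(D, {1: 0, 2: 0, "a": 1, "b": 1}, "bipartite")
>>> try:
...     bipartite.sets(D)
... except nx.AmbiguousSolution:
...     top = {n for n, d in D.nodes(data=True) if d["bipartite"] == 0}
>>> sorted(top)
[1, 2]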
12 binary files not shown.
303 venv/Lib/site-packages/networkx/algorithms/bipartite/basic.py Normal file
@@ -0,0 +1,303 @@
"""
|
||||
==========================
|
||||
Bipartite Graph Algorithms
|
||||
==========================
|
||||
"""
|
||||
import networkx as nx
|
||||
from networkx.algorithms.components import connected_components
|
||||
|
||||
__all__ = [
|
||||
"is_bipartite",
|
||||
"is_bipartite_node_set",
|
||||
"color",
|
||||
"sets",
|
||||
"density",
|
||||
"degrees",
|
||||
]
|
||||
|
||||
|
||||
def color(G):
|
||||
"""Returns a two-coloring of the graph.
|
||||
|
||||
Raises an exception if the graph is not bipartite.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
color : dictionary
|
||||
A dictionary keyed by node with a 1 or 0 as data for each node color.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If the graph is not two-colorable.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> c = bipartite.color(G)
|
||||
>>> print(c)
|
||||
{0: 1, 1: 0, 2: 1, 3: 0}
|
||||
|
||||
You can use this to set a node attribute indicating the bipartite set:
|
||||
|
||||
>>> nx.set_node_attributes(G, c, "bipartite")
|
||||
>>> print(G.nodes[0]["bipartite"])
|
||||
1
|
||||
>>> print(G.nodes[1]["bipartite"])
|
||||
0
|
||||
"""
|
||||
if G.is_directed():
|
||||
import itertools
|
||||
|
||||
def neighbors(v):
|
||||
return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])
|
||||
|
||||
else:
|
||||
neighbors = G.neighbors
|
||||
|
||||
color = {}
|
||||
for n in G: # handle disconnected graphs
|
||||
if n in color or len(G[n]) == 0: # skip isolates
|
||||
continue
|
||||
queue = [n]
|
||||
color[n] = 1 # nodes seen with color (1 or 0)
|
||||
while queue:
|
||||
v = queue.pop()
|
||||
c = 1 - color[v] # opposite color of node v
|
||||
for w in neighbors(v):
|
||||
if w in color:
|
||||
if color[w] == color[v]:
|
||||
raise nx.NetworkXError("Graph is not bipartite.")
|
||||
else:
|
||||
color[w] = c
|
||||
queue.append(w)
|
||||
# color isolates with 0
|
||||
color.update(dict.fromkeys(nx.isolates(G), 0))
|
||||
return color
|
||||
|
||||
|
||||
def is_bipartite(G):
|
||||
""" Returns True if graph G is bipartite, False if not.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> print(bipartite.is_bipartite(G))
|
||||
True
|
||||
|
||||
See Also
|
||||
--------
|
||||
color, is_bipartite_node_set
|
||||
"""
|
||||
try:
|
||||
color(G)
|
||||
return True
|
||||
except nx.NetworkXError:
|
||||
return False
|
||||
|
||||
|
||||
def is_bipartite_node_set(G, nodes):
|
||||
"""Returns True if nodes and G/nodes are a bipartition of G.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nodes: list or container
|
||||
Check if nodes are one of the two bipartite node sets of G.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> X = set([1, 3])
|
||||
>>> bipartite.is_bipartite_node_set(G, X)
|
||||
True
|
||||
|
||||
Notes
|
||||
-----
|
||||
For connected graphs the bipartite sets are unique. This function handles
|
||||
disconnected graphs.
|
||||
"""
|
||||
S = set(nodes)
|
||||
for CC in (G.subgraph(c).copy() for c in connected_components(G)):
|
||||
X, Y = sets(CC)
|
||||
if not (
|
||||
(X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S))
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def sets(G, top_nodes=None):
|
||||
"""Returns bipartite node sets of graph G.
|
||||
|
||||
Raises an exception if the graph is not bipartite or if the input
|
||||
graph is disconnected and thus more than one valid solution exists.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
top_nodes : container, optional
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
X : set
|
||||
Nodes from one side of the bipartite graph.
|
||||
Y : set
|
||||
Nodes from the other side.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
NetworkXError
|
||||
Raised if the input graph is not bipartite.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> X, Y = bipartite.sets(G)
|
||||
>>> list(X)
|
||||
[0, 2]
|
||||
>>> list(Y)
|
||||
[1, 3]
|
||||
|
||||
See Also
|
||||
--------
|
||||
color
|
||||
|
||||
"""
|
||||
if G.is_directed():
|
||||
is_connected = nx.is_weakly_connected
|
||||
else:
|
||||
is_connected = nx.is_connected
|
||||
if top_nodes is not None:
|
||||
X = set(top_nodes)
|
||||
Y = set(G) - X
|
||||
else:
|
||||
if not is_connected(G):
|
||||
msg = "Disconnected graph: Ambiguous solution for bipartite sets."
|
||||
raise nx.AmbiguousSolution(msg)
|
||||
c = color(G)
|
||||
X = {n for n, is_top in c.items() if is_top}
|
||||
Y = {n for n, is_top in c.items() if not is_top}
|
||||
return (X, Y)
|
||||
|
||||
|
||||
def density(B, nodes):
|
||||
"""Returns density of bipartite graph B.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nodes: list or container
|
||||
Nodes in one node set of the bipartite graph.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d : float
|
||||
The bipartite density
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.complete_bipartite_graph(3, 2)
|
||||
>>> X = set([0, 1, 2])
|
||||
>>> bipartite.density(G, X)
|
||||
1.0
|
||||
>>> Y = set([3, 4])
|
||||
>>> bipartite.density(G, Y)
|
||||
1.0
|
||||
|
||||
Notes
|
||||
-----
|
||||
The container of nodes passed as argument must contain all nodes
|
||||
in one of the two bipartite node sets to avoid ambiguity in the
|
||||
case of disconnected graphs.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
color
|
||||
"""
|
||||
n = len(B)
|
||||
m = nx.number_of_edges(B)
|
||||
nb = len(nodes)
|
||||
nt = n - nb
|
||||
if m == 0: # includes cases n==0 and n==1
|
||||
d = 0.0
|
||||
else:
|
||||
if B.is_directed():
|
||||
d = m / (2.0 * float(nb * nt))
|
||||
else:
|
||||
d = m / float(nb * nt)
|
||||
return d
|
||||
|
||||
|
||||
def degrees(B, nodes, weight=None):
|
||||
"""Returns the degrees of the two node sets in the bipartite graph B.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nodes: list or container
|
||||
Nodes in one node set of the bipartite graph.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used as a weight.
|
||||
If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
Returns
|
||||
-------
|
||||
(degX,degY) : tuple of dictionaries
|
||||
The degrees of the two bipartite sets as dictionaries keyed by node.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.complete_bipartite_graph(3, 2)
|
||||
>>> Y = set([3, 4])
|
||||
>>> degX, degY = bipartite.degrees(G, Y)
|
||||
>>> dict(degX)
|
||||
{0: 2, 1: 2, 2: 2}
|
||||
|
||||
Notes
|
||||
-----
|
||||
The container of nodes passed as argument must contain all nodes
|
||||
in one of the two bipartite node sets to avoid ambiguity in the
|
||||
case of disconnected graphs.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
color, density
|
||||
"""
|
||||
bottom = set(nodes)
|
||||
top = set(B) - bottom
|
||||
return (B.degree(top, weight), B.degree(bottom, weight))
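
As a brief sketch of how `sets` and `is_bipartite_node_set` defined above behave on a disconnected graph when the node set is supplied explicitly (the small graph is illustrative only):

>>> import networkx as nx
>>> from networkx.algorithms import bipartite
>>> G = nx.Graph([(0, 1), (2, 3)])  # two disjoint edges, so the two-coloring is ambiguous
>>> X, Y = bipartite.sets(G, top_nodes={0, 2})  # explicit top nodes: no exception raised
>>> sorted(X), sorted(Y)
([0, 2], [1, 3])
>>> bipartite.is_bipartite_node_set(G, {0, 2})
True
>>> bipartite.is_bipartite_node_set(G, {0, 3})  # another valid bipartition of this graph
True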
268 venv/Lib/site-packages/networkx/algorithms/bipartite/centrality.py Normal file
@@ -0,0 +1,268 @@
import networkx as nx
|
||||
|
||||
__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"]
|
||||
|
||||
|
||||
def degree_centrality(G, nodes):
|
||||
r"""Compute the degree centrality for nodes in a bipartite network.
|
||||
|
||||
The degree centrality for a node `v` is the fraction of nodes
|
||||
connected to it.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite network
|
||||
|
||||
nodes : list or container
|
||||
Container with all nodes in one bipartite node set.
|
||||
|
||||
Returns
|
||||
-------
|
||||
centrality : dictionary
|
||||
Dictionary keyed by node with bipartite degree centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality,
|
||||
closeness_centrality,
|
||||
sets,
|
||||
is_bipartite
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes input parameter must contain all nodes in one bipartite node set,
|
||||
but the dictionary returned contains all nodes from both bipartite node
|
||||
sets. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
For unipartite networks, the degree centrality values are
|
||||
normalized by dividing by the maximum possible degree (which is
|
||||
`n-1` where `n` is the number of nodes in G).
|
||||
|
||||
In the bipartite case, the maximum possible degree of a node in a
|
||||
bipartite node set is the number of nodes in the opposite node set
|
||||
[1]_. The degree centrality for a node `v` in the bipartite
|
||||
sets `U` with `n` nodes and `V` with `m` nodes is
|
||||
|
||||
.. math::
|
||||
|
||||
d_{v} = \frac{deg(v)}{m}, \mbox{for } v \in U ,

d_{v} = \frac{deg(v)}{n}, \mbox{for } v \in V ,
|
||||
|
||||
|
||||
where `deg(v)` is the degree of node `v`.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
http://www.steveborgatti.com/research/publications/bhaffiliations.pdf
|
||||
"""
|
||||
top = set(nodes)
|
||||
bottom = set(G) - top
|
||||
s = 1.0 / len(bottom)
|
||||
centrality = {n: d * s for n, d in G.degree(top)}
|
||||
s = 1.0 / len(top)
|
||||
centrality.update({n: d * s for n, d in G.degree(bottom)})
|
||||
return centrality
|
||||
|
||||
|
||||
def betweenness_centrality(G, nodes):
|
||||
r"""Compute betweenness centrality for nodes in a bipartite network.
|
||||
|
||||
Betweenness centrality of a node `v` is the sum of the
|
||||
fraction of all-pairs shortest paths that pass through `v`.
|
||||
|
||||
Values of betweenness are normalized by the maximum possible
|
||||
value which for bipartite graphs is limited by the relative size
|
||||
of the two node sets [1]_.
|
||||
|
||||
Let `n` be the number of nodes in the node set `U` and
|
||||
`m` be the number of nodes in the node set `V`, then
|
||||
nodes in `U` are normalized by dividing by
|
||||
|
||||
.. math::
|
||||
|
||||
\frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] ,
|
||||
|
||||
where
|
||||
|
||||
.. math::
|
||||
|
||||
s = (n - 1) \div m , t = (n - 1) \mod m ,
|
||||
|
||||
and nodes in `V` are normalized by dividing by
|
||||
|
||||
.. math::
|
||||
|
||||
\frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] ,
|
||||
|
||||
where,
|
||||
|
||||
.. math::
|
||||
|
||||
p = (m - 1) \div n , r = (m - 1) \mod n .
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite graph
|
||||
|
||||
nodes : list or container
|
||||
Container with all nodes in one bipartite node set.
|
||||
|
||||
Returns
|
||||
-------
|
||||
betweenness : dictionary
|
||||
Dictionary keyed by node with bipartite betweenness centrality
|
||||
as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality,
|
||||
closeness_centrality,
|
||||
sets,
|
||||
is_bipartite
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes input parameter must contain all nodes in one bipartite node set,
|
||||
but the dictionary returned contains all nodes from both node sets.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
http://www.steveborgatti.com/research/publications/bhaffiliations.pdf
|
||||
"""
|
||||
top = set(nodes)
|
||||
bottom = set(G) - top
|
||||
n = float(len(top))
|
||||
m = float(len(bottom))
|
||||
s = (n - 1) // m
|
||||
t = (n - 1) % m
|
||||
bet_max_top = (
|
||||
((m ** 2) * ((s + 1) ** 2))
|
||||
+ (m * (s + 1) * (2 * t - s - 1))
|
||||
- (t * ((2 * s) - t + 3))
|
||||
) / 2.0
|
||||
p = (m - 1) // n
|
||||
r = (m - 1) % n
|
||||
bet_max_bot = (
|
||||
((n ** 2) * ((p + 1) ** 2))
|
||||
+ (n * (p + 1) * (2 * r - p - 1))
|
||||
- (r * ((2 * p) - r + 3))
|
||||
) / 2.0
|
||||
betweenness = nx.betweenness_centrality(G, normalized=False, weight=None)
|
||||
for node in top:
|
||||
betweenness[node] /= bet_max_top
|
||||
for node in bottom:
|
||||
betweenness[node] /= bet_max_bot
|
||||
return betweenness
|
||||
|
||||
|
||||
def closeness_centrality(G, nodes, normalized=True):
|
||||
r"""Compute the closeness centrality for nodes in a bipartite network.
|
||||
|
||||
The closeness of a node is based on its distance to all other nodes in the
graph or, if the graph is not connected, to all other nodes in the
connected component containing that node.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite network
|
||||
|
||||
nodes : list or container
|
||||
Container with all nodes in one bipartite node set.
|
||||
|
||||
normalized : bool, optional
|
||||
If True (default) normalize by connected component size.
|
||||
|
||||
Returns
|
||||
-------
|
||||
closeness : dictionary
|
||||
Dictionary keyed by node with bipartite closeness centrality
|
||||
as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality,
|
||||
degree_centrality,
|
||||
sets,
|
||||
is_bipartite
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes input parameter must contain all nodes in one bipartite node set,
|
||||
but the dictionary returned contains all nodes from both node sets.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
|
||||
Closeness centrality is normalized by the minimum distance possible.
|
||||
In the bipartite case the minimum distance for a node in one bipartite
|
||||
node set is 1 from all nodes in the other node set and 2 from all
|
||||
other nodes in its own set [1]_. Thus the closeness centrality
|
||||
for node `v` in the two bipartite sets `U` with
|
||||
`n` nodes and `V` with `m` nodes is
|
||||
|
||||
.. math::
|
||||
|
||||
c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for } v \in U,

c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for } v \in V,
|
||||
|
||||
where `d` is the sum of the distances from `v` to all
|
||||
other nodes.
|
||||
|
||||
Higher values of closeness indicate higher centrality.
|
||||
|
||||
As in the unipartite case, setting normalized=True causes the
|
||||
values to be normalized further by (n - 1) / (size(G) - 1), where n is the
number of nodes in the connected part of the graph containing the
|
||||
node. If the graph is not completely connected, this algorithm
|
||||
computes the closeness centrality for each connected part
|
||||
separately.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
http://www.steveborgatti.com/research/publications/bhaffiliations.pdf
|
||||
"""
|
||||
closeness = {}
|
||||
path_length = nx.single_source_shortest_path_length
|
||||
top = set(nodes)
|
||||
bottom = set(G) - top
|
||||
n = float(len(top))
|
||||
m = float(len(bottom))
|
||||
for node in top:
|
||||
sp = dict(path_length(G, node))
|
||||
totsp = sum(sp.values())
|
||||
if totsp > 0.0 and len(G) > 1:
|
||||
closeness[node] = (m + 2 * (n - 1)) / totsp
|
||||
if normalized:
|
||||
s = (len(sp) - 1.0) / (len(G) - 1)
|
||||
closeness[node] *= s
|
||||
else:
|
||||
closeness[node] = 0.0  # isolated node (or trivial graph): closeness is zero
|
||||
for node in bottom:
|
||||
sp = dict(path_length(G, node))
|
||||
totsp = sum(sp.values())
|
||||
if totsp > 0.0 and len(G) > 1:
|
||||
closeness[node] = (n + 2 * (m - 1)) / totsp
|
||||
if normalized:
|
||||
s = (len(sp) - 1.0) / (len(G) - 1)
|
||||
closeness[node] *= s
|
||||
else:
|
||||
closeness[node] = 0.0  # isolated node (or trivial graph): closeness is zero
|
||||
return closeness
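
The three centrality functions above ship without doctests, so here is a small usage sketch on the complete bipartite graph K_{2,3}; the values follow from the normalizations described in the docstrings and the example is illustrative only:

>>> import networkx as nx
>>> from networkx.algorithms import bipartite
>>> G = nx.complete_bipartite_graph(2, 3)
>>> top = {0, 1}
>>> bipartite.degree_centrality(G, top)[0]
1.0
>>> bipartite.closeness_centrality(G, top)[0]
1.0
>>> round(bipartite.betweenness_centrality(G, top)[0], 2)
0.3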
276 venv/Lib/site-packages/networkx/algorithms/bipartite/cluster.py Normal file
@@ -0,0 +1,276 @@
"""Functions for computing clustering of pairs
|
||||
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import networkx as nx
|
||||
|
||||
__all__ = [
|
||||
"clustering",
|
||||
"average_clustering",
|
||||
"latapy_clustering",
|
||||
"robins_alexander_clustering",
|
||||
]
|
||||
|
||||
|
||||
def cc_dot(nu, nv):
|
||||
return float(len(nu & nv)) / len(nu | nv)
|
||||
|
||||
|
||||
def cc_max(nu, nv):
|
||||
return float(len(nu & nv)) / max(len(nu), len(nv))
|
||||
|
||||
|
||||
def cc_min(nu, nv):
|
||||
return float(len(nu & nv)) / min(len(nu), len(nv))
|
||||
|
||||
|
||||
modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
|
||||
|
||||
|
||||
def latapy_clustering(G, nodes=None, mode="dot"):
|
||||
r"""Compute a bipartite clustering coefficient for nodes.
|
||||
|
||||
The bipartite clustering coefficient is a measure of local density
|
||||
of connections defined as [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}
|
||||
|
||||
where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
|
||||
and `c_{uv}` is the pairwise clustering coefficient between nodes
|
||||
`u` and `v`.
|
||||
|
||||
The mode selects the function for `c_{uv}` which can be:
|
||||
|
||||
`dot`:
|
||||
|
||||
.. math::
|
||||
|
||||
c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}
|
||||
|
||||
`min`:
|
||||
|
||||
.. math::
|
||||
|
||||
c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}
|
||||
|
||||
`max`:
|
||||
|
||||
.. math::
|
||||
|
||||
c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)}
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite graph
|
||||
|
||||
nodes : list or iterable (optional)
|
||||
Compute bipartite clustering for these nodes. The default
|
||||
is all nodes in G.
|
||||
|
||||
mode : string
|
||||
The pairwise bipartite clustering method to be used in the computation.
|
||||
It must be "dot", "max", or "min".
|
||||
|
||||
Returns
|
||||
-------
|
||||
clustering : dictionary
|
||||
A dictionary keyed by node with the clustering coefficient value.
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4) # path graphs are bipartite
|
||||
>>> c = bipartite.clustering(G)
|
||||
>>> c[0]
|
||||
0.5
|
||||
>>> c = bipartite.clustering(G, mode="min")
|
||||
>>> c[0]
|
||||
1.0
|
||||
|
||||
See Also
|
||||
--------
|
||||
robins_alexander_clustering
|
||||
square_clustering
|
||||
average_clustering
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
|
||||
Basic notions for the analysis of large two-mode networks.
|
||||
Social Networks 30(1), 31--48.
|
||||
"""
|
||||
if not nx.algorithms.bipartite.is_bipartite(G):
|
||||
raise nx.NetworkXError("Graph is not bipartite")
|
||||
|
||||
try:
|
||||
cc_func = modes[mode]
|
||||
except KeyError as e:
|
||||
raise nx.NetworkXError(
|
||||
"Mode for bipartite clustering must be: dot, min or max"
|
||||
) from e
|
||||
|
||||
if nodes is None:
|
||||
nodes = G
|
||||
ccs = {}
|
||||
for v in nodes:
|
||||
cc = 0.0
|
||||
nbrs2 = {u for nbr in G[v] for u in G[nbr]} - {v}
|
||||
for u in nbrs2:
|
||||
cc += cc_func(set(G[u]), set(G[v]))
|
||||
if cc > 0.0: # len(nbrs2)>0
|
||||
cc /= len(nbrs2)
|
||||
ccs[v] = cc
|
||||
return ccs
|
||||
|
||||
|
||||
clustering = latapy_clustering
|
||||
|
||||
|
||||
def average_clustering(G, nodes=None, mode="dot"):
|
||||
r"""Compute the average bipartite clustering coefficient.
|
||||
|
||||
A clustering coefficient for the whole graph is the average,
|
||||
|
||||
.. math::
|
||||
|
||||
C = \frac{1}{n}\sum_{v \in G} c_v,
|
||||
|
||||
where `n` is the number of nodes in `G`.
|
||||
|
||||
Similar measures for the two bipartite sets can be defined [1]_
|
||||
|
||||
.. math::
|
||||
|
||||
C_X = \frac{1}{|X|}\sum_{v \in X} c_v,
|
||||
|
||||
where `X` is a bipartite set of `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
a bipartite graph
|
||||
|
||||
nodes : list or iterable, optional
|
||||
A container of nodes to use in computing the average.
|
||||
The nodes should be either the entire graph (the default) or one of the
|
||||
bipartite sets.
|
||||
|
||||
mode : string
|
||||
The pairwise bipartite clustering method.
|
||||
It must be "dot", "max", or "min"
|
||||
|
||||
Returns
|
||||
-------
|
||||
clustering : float
|
||||
The average bipartite clustering for the given set of nodes or the
|
||||
entire graph if no nodes are specified.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.star_graph(3) # star graphs are bipartite
|
||||
>>> bipartite.average_clustering(G)
|
||||
0.75
|
||||
>>> X, Y = bipartite.sets(G)
|
||||
>>> bipartite.average_clustering(G, X)
|
||||
0.0
|
||||
>>> bipartite.average_clustering(G, Y)
|
||||
1.0
|
||||
|
||||
See Also
|
||||
--------
|
||||
clustering
|
||||
|
||||
Notes
|
||||
-----
|
||||
The container of nodes passed to this function must contain all of the nodes
|
||||
in one of the bipartite sets ("top" or "bottom") in order to compute
|
||||
the correct average bipartite clustering coefficients.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
|
||||
Basic notions for the analysis of large two-mode networks.
|
||||
Social Networks 30(1), 31--48.
|
||||
"""
|
||||
if nodes is None:
|
||||
nodes = G
|
||||
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
|
||||
return float(sum(ccs[v] for v in nodes)) / len(nodes)
|
||||
|
||||
|
||||
def robins_alexander_clustering(G):
|
||||
r"""Compute the bipartite clustering of G.
|
||||
|
||||
Robins and Alexander [1]_ defined bipartite clustering coefficient as
|
||||
four times the number of four cycles `C_4` divided by the number of
|
||||
three paths `L_3` in a bipartite graph:
|
||||
|
||||
.. math::
|
||||
|
||||
CC_4 = \frac{4 * C_4}{L_3}
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
a bipartite graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
clustering : float
|
||||
The Robins and Alexander bipartite clustering for the input graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.davis_southern_women_graph()
|
||||
>>> print(round(bipartite.robins_alexander_clustering(G), 3))
|
||||
0.468
|
||||
|
||||
See Also
|
||||
--------
|
||||
latapy_clustering
|
||||
square_clustering
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
|
||||
directors: Network structure and distance in bipartite graphs.
|
||||
Computational & Mathematical Organization Theory 10(1), 69–94.
|
||||
|
||||
"""
|
||||
if G.order() < 4 or G.size() < 3:
|
||||
return 0
|
||||
L_3 = _threepaths(G)
|
||||
if L_3 == 0:
|
||||
return 0
|
||||
C_4 = _four_cycles(G)
|
||||
return (4.0 * C_4) / L_3
|
||||
|
||||
|
||||
def _four_cycles(G):
|
||||
cycles = 0
|
||||
for v in G:
|
||||
for u, w in itertools.combinations(G[v], 2):
|
||||
cycles += len((set(G[u]) & set(G[w])) - {v})
|
||||
return cycles / 4
|
||||
|
||||
|
||||
def _threepaths(G):
|
||||
paths = 0
|
||||
for v in G:
|
||||
for u in G[v]:
|
||||
for w in set(G[u]) - {v}:
|
||||
paths += len(set(G[w]) - {v, u})
|
||||
# Divide by two because we count each three path twice
|
||||
# one for each possible starting point
|
||||
return paths / 2
55 venv/Lib/site-packages/networkx/algorithms/bipartite/covering.py Normal file
@@ -0,0 +1,55 @@
""" Functions related to graph covers."""
|
||||
|
||||
from networkx.utils import not_implemented_for
|
||||
from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
|
||||
from networkx.algorithms.covering import min_edge_cover as _min_edge_cover
|
||||
|
||||
__all__ = ["min_edge_cover"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
def min_edge_cover(G, matching_algorithm=None):
|
||||
"""Returns a set of edges which constitutes
|
||||
a minimum edge cover of the graph.
|
||||
|
||||
The smallest edge cover can be found in polynomial time by finding
|
||||
a maximum matching and extending it greedily so that all nodes
|
||||
are covered.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
An undirected bipartite graph.
|
||||
|
||||
matching_algorithm : function
|
||||
A function that returns a maximum cardinality matching in a
|
||||
given bipartite graph. The function must take one input, the
|
||||
graph ``G``, and return a dictionary mapping each node to its
|
||||
mate. If not specified,
|
||||
:func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
|
||||
will be used. Other possibilities include
|
||||
:func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
set
|
||||
A set of the edges in a minimum edge cover of the graph, given as
|
||||
pairs of nodes. It contains both the edges `(u, v)` and `(v, u)`
|
||||
for given nodes `u` and `v` among the edges of minimum edge cover.
|
||||
|
||||
Notes
|
||||
-----
|
||||
An edge cover of a graph is a set of edges such that every node of
|
||||
the graph is incident to at least one edge of the set.
|
||||
A minimum edge cover is an edge covering of smallest cardinality.
|
||||
|
||||
Due to its implementation, the worst-case running time of this algorithm
|
||||
is bounded by the worst-case running time of the function
|
||||
``matching_algorithm``.
|
||||
"""
|
||||
if G.order() == 0: # Special case for the empty graph
|
||||
return set()
|
||||
if matching_algorithm is None:
|
||||
matching_algorithm = hopcroft_karp_matching
|
||||
return _min_edge_cover(G, matching_algorithm=matching_algorithm)
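
A short usage sketch for `min_edge_cover` (illustrative only; note that, as documented above, each cover edge is reported in both orientations):

>>> import networkx as nx
>>> from networkx.algorithms import bipartite
>>> G = nx.path_graph(4)  # bipartite with node sets {0, 2} and {1, 3}
>>> cover = bipartite.min_edge_cover(G)
>>> sorted(cover)
[(0, 1), (1, 0), (2, 3), (3, 2)]
>>> set(G) == {n for edge in cover for n in edge}  # every node is covered
True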
357 venv/Lib/site-packages/networkx/algorithms/bipartite/edgelist.py Normal file
@@ -0,0 +1,357 @@
"""
|
||||
********************
|
||||
Bipartite Edge Lists
|
||||
********************
|
||||
Read and write NetworkX graphs as bipartite edge lists.
|
||||
|
||||
Format
|
||||
------
|
||||
You can read or write three formats of edge lists with these functions.
|
||||
|
||||
Node pairs with no data::
|
||||
|
||||
1 2
|
||||
|
||||
Python dictionary as data::
|
||||
|
||||
1 2 {'weight':7, 'color':'green'}
|
||||
|
||||
Arbitrary data::
|
||||
|
||||
1 2 7 green
|
||||
|
||||
For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
|
||||
"""
|
||||
__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import open_file, not_implemented_for
|
||||
|
||||
|
||||
@open_file(1, mode="wb")
|
||||
def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
|
||||
"""Write a bipartite graph as a list of edges.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : Graph
|
||||
A NetworkX bipartite graph
|
||||
path : file or string
|
||||
File or filename to write. If a file is provided, it must be
|
||||
opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
|
||||
comments : string, optional
|
||||
The character used to indicate the start of a comment
|
||||
delimiter : string, optional
|
||||
The string used to separate values. The default is whitespace.
|
||||
data : bool or list, optional
|
||||
If False write no edge data.
|
||||
If True write a string representation of the edge data dictionary.
|
||||
If a list (or other iterable) is provided, write the keys specified
|
||||
in the list.
|
||||
encoding: string, optional
|
||||
Specify which encoding to use when writing file.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.add_nodes_from([0, 2], bipartite=0)
|
||||
>>> G.add_nodes_from([1, 3], bipartite=1)
|
||||
>>> nx.write_edgelist(G, "test.edgelist")
|
||||
>>> fh = open("test.edgelist", "wb")
|
||||
>>> nx.write_edgelist(G, fh)
|
||||
>>> nx.write_edgelist(G, "test.edgelist.gz")
|
||||
>>> nx.write_edgelist(G, "test.edgelist.gz", data=False)
|
||||
|
||||
>>> G = nx.Graph()
|
||||
>>> G.add_edge(1, 2, weight=7, color="red")
|
||||
>>> nx.write_edgelist(G, "test.edgelist", data=False)
|
||||
>>> nx.write_edgelist(G, "test.edgelist", data=["color"])
|
||||
>>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"])
|
||||
|
||||
See Also
|
||||
--------
|
||||
read_edgelist()
|
||||
generate_edgelist()
|
||||
"""
|
||||
for line in generate_edgelist(G, delimiter, data):
|
||||
line += "\n"
|
||||
path.write(line.encode(encoding))
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
def generate_edgelist(G, delimiter=" ", data=True):
|
||||
"""Generate a single line of the bipartite graph G in edge list format.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
The graph is assumed to have node attribute `bipartite` set to 0 or 1 representing
|
||||
the two graph parts
|
||||
|
||||
delimiter : string, optional
|
||||
Separator for node labels
|
||||
|
||||
data : bool or list of keys
|
||||
If False generate no edge data. If True use a dictionary
|
||||
representation of edge data. If a list of keys use a list of data
|
||||
values corresponding to the keys.
|
||||
|
||||
Returns
|
||||
-------
|
||||
lines : string
|
||||
Lines of data in edge list format.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.add_nodes_from([0, 2], bipartite=0)
|
||||
>>> G.add_nodes_from([1, 3], bipartite=1)
|
||||
>>> G[1][2]["weight"] = 3
|
||||
>>> G[2][3]["capacity"] = 12
|
||||
>>> for line in bipartite.generate_edgelist(G, data=False):
|
||||
... print(line)
|
||||
0 1
|
||||
2 1
|
||||
2 3
|
||||
|
||||
>>> for line in bipartite.generate_edgelist(G):
|
||||
... print(line)
|
||||
0 1 {}
|
||||
2 1 {'weight': 3}
|
||||
2 3 {'capacity': 12}
|
||||
|
||||
>>> for line in bipartite.generate_edgelist(G, data=["weight"]):
|
||||
... print(line)
|
||||
0 1
|
||||
2 1 3
|
||||
2 3
|
||||
"""
|
||||
try:
|
||||
part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
|
||||
except BaseException as e:
|
||||
raise AttributeError("Missing node attribute `bipartite`") from e
|
||||
if data is True or data is False:
|
||||
for n in part0:
|
||||
for e in G.edges(n, data=data):
|
||||
yield delimiter.join(map(str, e))
|
||||
else:
|
||||
for n in part0:
|
||||
for u, v, d in G.edges(n, data=True):
|
||||
e = [u, v]
|
||||
try:
|
||||
e.extend(d[k] for k in data)
|
||||
except KeyError:
|
||||
pass # missing data for this edge, should warn?
|
||||
yield delimiter.join(map(str, e))
|
||||
|
||||
|
||||
def parse_edgelist(
|
||||
lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
|
||||
):
|
||||
"""Parse lines of an edge list representation of a bipartite graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
lines : list or iterator of strings
|
||||
Input data in edgelist format
|
||||
comments : string, optional
|
||||
Marker for comment lines
|
||||
delimiter : string, optional
|
||||
Separator for node labels
|
||||
create_using: NetworkX graph container, optional
|
||||
Use given NetworkX graph for holding nodes or edges.
|
||||
nodetype : Python type, optional
|
||||
Convert nodes to this type.
|
||||
data : bool or list of (label,type) tuples
|
||||
If False generate no edge data or if True use a dictionary
|
||||
representation of edge data or a list tuples specifying dictionary
|
||||
key names and types for edge data.
|
||||
|
||||
Returns
|
||||
-------
|
||||
G: NetworkX Graph
|
||||
The bipartite graph corresponding to lines
|
||||
|
||||
Examples
|
||||
--------
|
||||
Edgelist with no data:
|
||||
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> lines = ["1 2", "2 3", "3 4"]
|
||||
>>> G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
>>> sorted(G.nodes())
|
||||
[1, 2, 3, 4]
|
||||
>>> sorted(G.nodes(data=True))
|
||||
[(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})]
|
||||
>>> sorted(G.edges())
|
||||
[(1, 2), (2, 3), (3, 4)]
|
||||
|
||||
Edgelist with data in Python dictionary representation:
|
||||
|
||||
>>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"]
|
||||
>>> G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
>>> sorted(G.nodes())
|
||||
[1, 2, 3, 4]
|
||||
>>> sorted(G.edges(data=True))
|
||||
[(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})]
|
||||
|
||||
Edgelist with data in a list:
|
||||
|
||||
>>> lines = ["1 2 3", "2 3 27", "3 4 3.0"]
|
||||
>>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),))
|
||||
>>> sorted(G.nodes())
|
||||
[1, 2, 3, 4]
|
||||
>>> sorted(G.edges(data=True))
|
||||
[(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})]
|
||||
|
||||
See Also
|
||||
--------
|
||||
"""
|
||||
from ast import literal_eval
|
||||
|
||||
G = nx.empty_graph(0, create_using)
|
||||
for line in lines:
|
||||
p = line.find(comments)
|
||||
if p >= 0:
|
||||
line = line[:p]
|
||||
if not len(line):
|
||||
continue
|
||||
# split line, should have 2 or more
|
||||
s = line.strip().split(delimiter)
|
||||
if len(s) < 2:
|
||||
continue
|
||||
u = s.pop(0)
|
||||
v = s.pop(0)
|
||||
d = s
|
||||
if nodetype is not None:
|
||||
try:
|
||||
u = nodetype(u)
|
||||
v = nodetype(v)
|
||||
except BaseException as e:
|
||||
raise TypeError(
|
||||
f"Failed to convert nodes {u},{v} " f"to type {nodetype}."
|
||||
) from e
|
||||
|
||||
if len(d) == 0 or data is False:
|
||||
# no data or data type specified
|
||||
edgedata = {}
|
||||
elif data is True:
|
||||
# no edge types specified
|
||||
try: # try to evaluate as dictionary
|
||||
edgedata = dict(literal_eval(" ".join(d)))
|
||||
except BaseException as e:
|
||||
raise TypeError(
|
||||
f"Failed to convert edge data ({d})" f"to dictionary."
|
||||
) from e
|
||||
else:
|
||||
# convert edge data to dictionary with specified keys and type
|
||||
if len(d) != len(data):
|
||||
raise IndexError(
|
||||
f"Edge data {d} and data_keys {data} are not the same length"
|
||||
)
|
||||
edgedata = {}
|
||||
for (edge_key, edge_type), edge_value in zip(data, d):
|
||||
try:
|
||||
edge_value = edge_type(edge_value)
|
||||
except BaseException as e:
|
||||
raise TypeError(
|
||||
f"Failed to convert {edge_key} data "
|
||||
f"{edge_value} to type {edge_type}."
|
||||
) from e
|
||||
edgedata.update({edge_key: edge_value})
|
||||
G.add_node(u, bipartite=0)
|
||||
G.add_node(v, bipartite=1)
|
||||
G.add_edge(u, v, **edgedata)
|
||||
return G
|
||||
|
||||
|
||||
@open_file(0, mode="rb")
|
||||
def read_edgelist(
|
||||
path,
|
||||
comments="#",
|
||||
delimiter=None,
|
||||
create_using=None,
|
||||
nodetype=None,
|
||||
data=True,
|
||||
edgetype=None,
|
||||
encoding="utf-8",
|
||||
):
|
||||
"""Read a bipartite graph from a list of edges.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : file or string
|
||||
File or filename to read. If a file is provided, it must be
|
||||
opened in 'rb' mode.
|
||||
Filenames ending in .gz or .bz2 will be uncompressed.
|
||||
comments : string, optional
|
||||
The character used to indicate the start of a comment.
|
||||
delimiter : string, optional
|
||||
The string used to separate values. The default is whitespace.
|
||||
create_using : Graph container, optional,
|
||||
Use specified container to build graph. The default is networkx.Graph,
|
||||
an undirected graph.
|
||||
nodetype : int, float, str, Python type, optional
|
||||
Convert node data from strings to specified type
|
||||
data : bool or list of (label,type) tuples
|
||||
Tuples specifying dictionary key names and types for edge data
|
||||
edgetype : int, float, str, Python type, optional OBSOLETE
|
||||
Convert edge data from strings to specified type and use as 'weight'
|
||||
encoding: string, optional
|
||||
Specify which encoding to use when reading file.
|
||||
|
||||
Returns
|
||||
-------
|
||||
G : graph
|
||||
A networkx Graph or other type specified with create_using
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.add_nodes_from([0, 2], bipartite=0)
|
||||
>>> G.add_nodes_from([1, 3], bipartite=1)
|
||||
>>> bipartite.write_edgelist(G, "test.edgelist")
|
||||
>>> G = bipartite.read_edgelist("test.edgelist")
|
||||
|
||||
>>> fh = open("test.edgelist", "rb")
|
||||
>>> G = bipartite.read_edgelist(fh)
|
||||
>>> fh.close()
|
||||
|
||||
>>> G = bipartite.read_edgelist("test.edgelist", nodetype=int)
|
||||
|
||||
Edgelist with data in a list:
|
||||
|
||||
>>> textline = "1 2 3"
|
||||
>>> fh = open("test.edgelist", "w")
|
||||
>>> d = fh.write(textline)
|
||||
>>> fh.close()
|
||||
>>> G = bipartite.read_edgelist(
|
||||
... "test.edgelist", nodetype=int, data=(("weight", float),)
|
||||
... )
|
||||
>>> list(G)
|
||||
[1, 2]
|
||||
>>> list(G.edges(data=True))
|
||||
[(1, 2, {'weight': 3.0})]
|
||||
|
||||
See parse_edgelist() for more examples of formatting.
|
||||
|
||||
See Also
|
||||
--------
|
||||
parse_edgelist
|
||||
|
||||
Notes
|
||||
-----
|
||||
Since nodes must be hashable, the function nodetype must return hashable
|
||||
types (e.g. int, float, str, frozenset - or tuples of those, etc.)
|
||||
"""
|
||||
lines = (line.decode(encoding) for line in path)
|
||||
return parse_edgelist(
|
||||
lines,
|
||||
comments=comments,
|
||||
delimiter=delimiter,
|
||||
create_using=create_using,
|
||||
nodetype=nodetype,
|
||||
data=data,
|
||||
)
595 venv/Lib/site-packages/networkx/algorithms/bipartite/generators.py Normal file
@@ -0,0 +1,595 @@
"""
|
||||
Generators and functions for bipartite graphs.
|
||||
"""
|
||||
import math
|
||||
import numbers
|
||||
from functools import reduce
|
||||
import networkx as nx
|
||||
from networkx.utils import nodes_or_number, py_random_state
|
||||
|
||||
__all__ = [
|
||||
"configuration_model",
|
||||
"havel_hakimi_graph",
|
||||
"reverse_havel_hakimi_graph",
|
||||
"alternating_havel_hakimi_graph",
|
||||
"preferential_attachment_graph",
|
||||
"random_graph",
|
||||
"gnmk_random_graph",
|
||||
"complete_bipartite_graph",
|
||||
]
|
||||
|
||||
|
||||
@nodes_or_number([0, 1])
|
||||
def complete_bipartite_graph(n1, n2, create_using=None):
|
||||
"""Returns the complete bipartite graph `K_{n_1,n_2}`.
|
||||
|
||||
The graph is composed of two partitions with nodes 0 to (n1 - 1)
|
||||
in the first and nodes n1 to (n1 + n2 - 1) in the second.
|
||||
Each node in the first is connected to each node in the second.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n1 : integer
|
||||
Number of nodes for node set A.
|
||||
n2 : integer
|
||||
Number of nodes for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
Node labels are the integers 0 to `n_1 + n_2 - 1`.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.complete_bipartite_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
n1, top = n1
|
||||
n2, bottom = n2
|
||||
if isinstance(n2, numbers.Integral):
|
||||
bottom = [n1 + i for i in bottom]
|
||||
G.add_nodes_from(top, bipartite=0)
|
||||
G.add_nodes_from(bottom, bipartite=1)
|
||||
G.add_edges_from((u, v) for u in top for v in bottom)
|
||||
G.graph["name"] = f"complete_bipartite_graph({n1},{n2})"
|
||||
return G
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
def configuration_model(aseq, bseq, create_using=None, seed=None):
|
||||
"""Returns a random bipartite graph from two given degree sequences.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from set A are connected to nodes in set B by choosing
|
||||
randomly from the possible free stubs, one in A and one in B.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.configuration_model
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length and sum of each sequence
|
||||
lena = len(aseq)
|
||||
lenb = len(bseq)
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, lena, lenb)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
|
||||
# build lists of degree-repeated vertex numbers
|
||||
stubs = []
|
||||
stubs.extend([[v] * aseq[v] for v in range(0, lena)])
|
||||
astubs = []
|
||||
astubs = [x for subseq in stubs for x in subseq]
|
||||
|
||||
stubs = []
|
||||
stubs.extend([[v] * bseq[v - lena] for v in range(lena, lena + lenb)])
|
||||
bstubs = []
|
||||
bstubs = [x for subseq in stubs for x in subseq]
|
||||
|
||||
# shuffle lists
|
||||
seed.shuffle(astubs)
|
||||
seed.shuffle(bstubs)
|
||||
|
||||
G.add_edges_from([[astubs[i], bstubs[i]] for i in range(suma)])
|
||||
|
||||
G.name = "bipartite_configuration_model"
|
||||
return G
|
||||
|
||||
|
||||
def havel_hakimi_graph(aseq, bseq, create_using=None):
|
||||
"""Returns a bipartite graph from two given degree sequences using a
|
||||
Havel-Hakimi style construction.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from the set A are connected to nodes in the set B by
|
||||
connecting the highest degree nodes in set A to the highest degree
|
||||
nodes in set B until all stubs are connected.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.havel_hakimi_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length of each sequence
|
||||
naseq = len(aseq)
|
||||
nbseq = len(bseq)
|
||||
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
|
||||
# build list of degree-repeated vertex numbers
|
||||
astubs = [[aseq[v], v] for v in range(0, naseq)]
|
||||
bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
|
||||
astubs.sort()
|
||||
while astubs:
|
||||
(degree, u) = astubs.pop()  # pop the largest-degree node in set A
|
||||
if degree == 0:
|
||||
break # done, all are zero
|
||||
# connect the source to largest degree nodes in the b set
|
||||
bstubs.sort()
|
||||
for target in bstubs[-degree:]:
|
||||
v = target[1]
|
||||
G.add_edge(u, v)
|
||||
target[0] -= 1 # note this updates bstubs too.
|
||||
if target[0] == 0:
|
||||
bstubs.remove(target)
|
||||
|
||||
G.name = "bipartite_havel_hakimi_graph"
|
||||
return G
|
||||
|
||||
|
||||
def reverse_havel_hakimi_graph(aseq, bseq, create_using=None):
|
||||
"""Returns a bipartite graph from two given degree sequences using a
|
||||
Havel-Hakimi style construction.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from set A are connected to nodes in the set B by connecting
|
||||
the highest degree nodes in set A to the lowest degree nodes in
|
||||
set B until all stubs are connected.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.reverse_havel_hakimi_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length of each sequence
|
||||
lena = len(aseq)
|
||||
lenb = len(bseq)
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, lena, lenb)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
|
||||
# build list of degree-repeated vertex numbers
|
||||
astubs = [[aseq[v], v] for v in range(0, lena)]
|
||||
bstubs = [[bseq[v - lena], v] for v in range(lena, lena + lenb)]
|
||||
astubs.sort()
|
||||
bstubs.sort()
|
||||
while astubs:
|
||||
(degree, u) = astubs.pop()  # pop the largest-degree node in set A
|
||||
if degree == 0:
|
||||
break # done, all are zero
|
||||
# connect the source to the smallest degree nodes in the b set
|
||||
for target in bstubs[0:degree]:
|
||||
v = target[1]
|
||||
G.add_edge(u, v)
|
||||
target[0] -= 1 # note this updates bstubs too.
|
||||
if target[0] == 0:
|
||||
bstubs.remove(target)
|
||||
|
||||
G.name = "bipartite_reverse_havel_hakimi_graph"
|
||||
return G
|
||||
|
||||
|
||||
def alternating_havel_hakimi_graph(aseq, bseq, create_using=None):
|
||||
"""Returns a bipartite graph from two given degree sequences using
|
||||
an alternating Havel-Hakimi style construction.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from the set A are connected to nodes in the set B by
|
||||
connecting the highest degree nodes in set A to alternatively the
|
||||
highest and the lowest degree nodes in set B until all stubs are
|
||||
connected.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.alternating_havel_hakimi_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length of each sequence
|
||||
naseq = len(aseq)
|
||||
nbseq = len(bseq)
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
# build list of degree-repeated vertex numbers
|
||||
astubs = [[aseq[v], v] for v in range(0, naseq)]
|
||||
bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
|
||||
while astubs:
|
||||
astubs.sort()
|
||||
(degree, u) = astubs.pop()  # take off the largest degree node in the A set
|
||||
if degree == 0:
|
||||
break # done, all are zero
|
||||
bstubs.sort()
|
||||
small = bstubs[0 : degree // 2] # add these low degree targets
|
||||
large = bstubs[(-degree + degree // 2) :] # now high degree targets
|
||||
stubs = [x for z in zip(large, small) for x in z]  # interleave large and small targets
|
||||
if len(stubs) < len(small) + len(large): # check for zip truncation
|
||||
stubs.append(large.pop())
|
||||
for target in stubs:
|
||||
v = target[1]
|
||||
G.add_edge(u, v)
|
||||
target[0] -= 1 # note this updates bstubs too.
|
||||
if target[0] == 0:
|
||||
bstubs.remove(target)
|
||||
|
||||
G.name = "bipartite_alternating_havel_hakimi_graph"
|
||||
return G
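# Usage sketch: a hedged example of the alternating construction above; the
# sequences are arbitrary with equal sums, and the check confirms that every
# edge joins the two node sets.  The helper name is hypothetical.
def _sketch_alternating_havel_hakimi():
    from networkx.algorithms import bipartite

    G = bipartite.alternating_havel_hakimi_graph([2, 2, 2], [3, 3])
    # True: edges only ever connect a bipartite=0 node to a bipartite=1 node.
    return all(G.nodes[u]["bipartite"] != G.nodes[v]["bipartite"] for u, v in G.edges())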
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
|
||||
"""Create a bipartite graph with a preferential attachment model from
|
||||
a given single degree sequence.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes starting with node len(aseq).
|
||||
The number of nodes in set B is random.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
p : float
|
||||
Probability that a new bottom node is added.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Guillaume, J.L. and Latapy, M.,
|
||||
Bipartite graphs as models of complex networks.
|
||||
Physica A: Statistical Mechanics and its Applications,
|
||||
2006, 371(2), pp.795-813.
|
||||
.. [2] Jean-Loup Guillaume and Matthieu Latapy,
|
||||
Bipartite structure of all complex networks,
|
||||
Inf. Process. Lett. 90, 2004, pg. 215-221
|
||||
https://doi.org/10.1016/j.ipl.2004.03.007
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it, use nx.bipartite.preferential_attachment_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
if p > 1:
|
||||
raise nx.NetworkXError(f"probability {p} > 1")
|
||||
|
||||
naseq = len(aseq)
|
||||
G = _add_nodes_with_bipartite_label(G, naseq, 0)
|
||||
vv = [[v] * aseq[v] for v in range(0, naseq)]
|
||||
while vv:
|
||||
while vv[0]:
|
||||
source = vv[0][0]
|
||||
vv[0].remove(source)
|
||||
if seed.random() < p or len(G) == naseq:
|
||||
target = len(G)
|
||||
G.add_node(target, bipartite=1)
|
||||
G.add_edge(source, target)
|
||||
else:
|
||||
bb = [[b] * G.degree(b) for b in range(naseq, len(G))]
|
||||
# flatten the list of lists into a list.
|
||||
bbstubs = reduce(lambda x, y: x + y, bb)
|
||||
# choose preferentially a bottom node.
|
||||
target = seed.choice(bbstubs)
|
||||
G.add_node(target, bipartite=1)
|
||||
G.add_edge(source, target)
|
||||
vv.remove(vv[0])
|
||||
G.name = "bipartite_preferential_attachment_model"
|
||||
return G
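# Usage sketch: a hedged example of the preferential attachment generator
# above.  The seed is pinned only for repeatability; the helper name is
# hypothetical and not part of NetworkX.
def _sketch_preferential_attachment():
    from networkx.algorithms import bipartite

    aseq = [3, 2, 2, 1]
    G = bipartite.preferential_attachment_graph(aseq, 0.5, seed=42)
    # A nodes keep their prescribed degrees; the number of B nodes is random.
    return [G.degree(v) for v in range(len(aseq))], len(G) - len(aseq)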
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
def random_graph(n, m, p, seed=None, directed=False):
|
||||
"""Returns a bipartite random graph.
|
||||
|
||||
This is a bipartite version of the binomial (Erdős-Rényi) graph.
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(n - 1) and set B has nodes n to (n + m - 1).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : int
|
||||
The number of nodes in the first bipartite set.
|
||||
m : int
|
||||
The number of nodes in the second bipartite set.
|
||||
p : float
|
||||
Probability for edge creation.
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
directed : bool, optional (default=False)
|
||||
If True return a directed graph
|
||||
|
||||
Notes
|
||||
-----
|
||||
The bipartite random graph algorithm chooses each of the n*m (undirected)
|
||||
or 2*n*m (directed) possible edges with probability p.
|
||||
|
||||
This algorithm runs in $O(n+m)$ time, where $m$ here denotes the expected number of edges rather than the size of the second node set.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it, use nx.bipartite.random_graph
|
||||
|
||||
See Also
|
||||
--------
|
||||
gnp_random_graph, configuration_model
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Vladimir Batagelj and Ulrik Brandes,
|
||||
"Efficient generation of large random networks",
|
||||
Phys. Rev. E, 71, 036113, 2005.
|
||||
"""
|
||||
G = nx.Graph()
|
||||
G = _add_nodes_with_bipartite_label(G, n, m)
|
||||
if directed:
|
||||
G = nx.DiGraph(G)
|
||||
G.name = f"bipartite_random_graph({n},{m},{p})"
|
||||
|
||||
if p <= 0:
|
||||
return G
|
||||
if p >= 1:
|
||||
return nx.complete_bipartite_graph(n, m)
|
||||
|
||||
lp = math.log(1.0 - p)
|
||||
|
||||
v = 0
|
||||
w = -1
|
||||
while v < n:
|
||||
lr = math.log(1.0 - seed.random())
|
||||
w = w + 1 + int(lr / lp)
|
||||
while w >= m and v < n:
|
||||
w = w - m
|
||||
v = v + 1
|
||||
if v < n:
|
||||
G.add_edge(v, n + w)
|
||||
|
||||
if directed:
|
||||
# use the same algorithm to
|
||||
# add edges from the "m" to "n" set
|
||||
v = 0
|
||||
w = -1
|
||||
while v < n:
|
||||
lr = math.log(1.0 - seed.random())
|
||||
w = w + 1 + int(lr / lp)
|
||||
while w >= m and v < n:
|
||||
w = w - m
|
||||
v = v + 1
|
||||
if v < n:
|
||||
G.add_edge(n + w, v)
|
||||
|
||||
return G
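# Usage sketch: a hedged example of the binomial bipartite generator above.
# All n + m nodes are present even when no edge is created; the helper name
# is hypothetical.
def _sketch_bipartite_random_graph():
    from networkx.algorithms import bipartite

    G = bipartite.random_graph(5, 7, 0.2, seed=1)
    top = {n for n, d in G.nodes(data=True) if d["bipartite"] == 0}
    return len(top), len(G) - len(top)  # (5, 7)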
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
def gnmk_random_graph(n, m, k, seed=None, directed=False):
|
||||
"""Returns a random bipartite graph G_{n,m,k}.
|
||||
|
||||
Produces a bipartite graph chosen randomly out of the set of all graphs
|
||||
with n top nodes, m bottom nodes, and k edges.
|
||||
The graph is composed of two sets of nodes.
|
||||
Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : int
|
||||
The number of nodes in the first bipartite set.
|
||||
m : int
|
||||
The number of nodes in the second bipartite set.
|
||||
k : int
|
||||
The number of edges
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
directed : bool, optional (default=False)
|
||||
If True return a directed graph
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = bipartite.gnmk_random_graph(10, 20, 50)
|
||||
|
||||
See Also
|
||||
--------
|
||||
gnm_random_graph
|
||||
|
||||
Notes
|
||||
-----
|
||||
If k > m * n then a complete bipartite graph is returned.
|
||||
|
||||
This graph is a bipartite version of the `G_{nm}` random graph model.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it, use nx.bipartite.gnmk_random_graph
|
||||
"""
|
||||
G = nx.Graph()
|
||||
G = _add_nodes_with_bipartite_label(G, n, m)
|
||||
if directed:
|
||||
G = nx.DiGraph(G)
|
||||
G.name = f"bipartite_gnm_random_graph({n},{m},{k})"
|
||||
if n == 1 or m == 1:
|
||||
return G
|
||||
max_edges = n * m # max_edges for bipartite networks
|
||||
if k >= max_edges: # Maybe we should raise an exception here
|
||||
return nx.complete_bipartite_graph(n, m, create_using=G)
|
||||
|
||||
top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
|
||||
bottom = list(set(G) - set(top))
|
||||
edge_count = 0
|
||||
while edge_count < k:
|
||||
# generate random edge,u,v
|
||||
u = seed.choice(top)
|
||||
v = seed.choice(bottom)
|
||||
if v in G[u]:
|
||||
continue
|
||||
else:
|
||||
G.add_edge(u, v)
|
||||
edge_count += 1
|
||||
return G
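# Usage sketch: a hedged example of the fixed-edge-count generator above;
# with k <= n * m the graph has exactly k edges.  The helper name is
# hypothetical.
def _sketch_gnmk_random_graph():
    from networkx.algorithms import bipartite

    G = bipartite.gnmk_random_graph(10, 20, 50, seed=2)
    return G.number_of_edges()  # 50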
|
||||
|
||||
|
||||
def _add_nodes_with_bipartite_label(G, lena, lenb):
|
||||
G.add_nodes_from(range(0, lena + lenb))
|
||||
b = dict(zip(range(0, lena), [0] * lena))
|
||||
b.update(dict(zip(range(lena, lena + lenb), [1] * lenb)))
|
||||
nx.set_node_attributes(G, b, "bipartite")
|
||||
return G
|
582
venv/Lib/site-packages/networkx/algorithms/bipartite/matching.py
Normal file
|
@@ -0,0 +1,582 @@
|
|||
# This module uses material from the Wikipedia article Hopcroft--Karp algorithm
|
||||
# <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>, accessed on
|
||||
# January 3, 2015, which is released under the Creative Commons
|
||||
# Attribution-Share-Alike License 3.0
|
||||
# <http://creativecommons.org/licenses/by-sa/3.0/>. That article includes
|
||||
# pseudocode, which has been translated into the corresponding Python code.
|
||||
#
|
||||
# Portions of this module use code from David Eppstein's Python Algorithms and
|
||||
# Data Structures (PADS) library, which is dedicated to the public domain (for
|
||||
# proof, see <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>).
|
||||
"""Provides functions for computing maximum cardinality matchings and minimum
|
||||
weight full matchings in a bipartite graph.
|
||||
|
||||
If you don't care about the particular implementation of the maximum matching
|
||||
algorithm, simply use the :func:`maximum_matching`. If you do care, you can
|
||||
import one of the named maximum matching algorithms directly.
|
||||
|
||||
For example, to find a maximum matching in the complete bipartite graph with
|
||||
two vertices on the left and three vertices on the right:
|
||||
|
||||
>>> G = nx.complete_bipartite_graph(2, 3)
|
||||
>>> left, right = nx.bipartite.sets(G)
|
||||
>>> list(left)
|
||||
[0, 1]
|
||||
>>> list(right)
|
||||
[2, 3, 4]
|
||||
>>> nx.bipartite.maximum_matching(G)
|
||||
{0: 2, 1: 3, 2: 0, 3: 1}
|
||||
|
||||
The dictionary returned by :func:`maximum_matching` includes a mapping for
|
||||
vertices in both the left and right vertex sets.
|
||||
|
||||
Similarly, :func:`minimum_weight_full_matching` produces, for a complete
|
||||
weighted bipartite graph, a matching whose cardinality is the cardinality of
|
||||
the smaller of the two partitions, and for which the sum of the weights of the
|
||||
edges included in the matching is minimal.
|
||||
|
||||
"""
|
||||
import collections
|
||||
import itertools
|
||||
|
||||
from networkx.algorithms.bipartite.matrix import biadjacency_matrix
|
||||
from networkx.algorithms.bipartite import sets as bipartite_sets
|
||||
import networkx as nx
|
||||
|
||||
__all__ = [
|
||||
"maximum_matching",
|
||||
"hopcroft_karp_matching",
|
||||
"eppstein_matching",
|
||||
"to_vertex_cover",
|
||||
"minimum_weight_full_matching",
|
||||
]
|
||||
|
||||
INFINITY = float("inf")
|
||||
|
||||
|
||||
def hopcroft_karp_matching(G, top_nodes=None):
|
||||
"""Returns the maximum cardinality matching of the bipartite graph `G`.
|
||||
|
||||
A matching is a set of edges that do not share any nodes. A maximum
|
||||
cardinality matching is a matching with the most edges possible. It
|
||||
is not always unique. Finding a matching in a bipartite graph can be
|
||||
treated as a networkx flow problem.
|
||||
|
||||
The functions ``hopcroft_karp_matching`` and ``maximum_matching``
|
||||
are aliases of the same function.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
top_nodes : container of nodes
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
matches : dictionary
|
||||
|
||||
The matching is returned as a dictionary, `matches`, such that
|
||||
``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
|
||||
nodes do not occur as a key in `matches`.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is implemented with the `Hopcroft--Karp matching algorithm
|
||||
<https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>`_ for
|
||||
bipartite graphs.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
maximum_matching
|
||||
hopcroft_karp_matching
|
||||
eppstein_matching
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for
|
||||
Maximum Matchings in Bipartite Graphs" In: **SIAM Journal on Computing**
|
||||
2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.
|
||||
|
||||
"""
|
||||
# First we define some auxiliary search functions.
|
||||
#
|
||||
# If you are a human reading these auxiliary search functions, the "global"
|
||||
# variables `leftmatches`, `rightmatches`, `distances`, etc. are defined
|
||||
# below the functions, so that they are initialized close to the initial
|
||||
# invocation of the search functions.
|
||||
def breadth_first_search():
|
||||
for v in left:
|
||||
if leftmatches[v] is None:
|
||||
distances[v] = 0
|
||||
queue.append(v)
|
||||
else:
|
||||
distances[v] = INFINITY
|
||||
distances[None] = INFINITY
|
||||
while queue:
|
||||
v = queue.popleft()
|
||||
if distances[v] < distances[None]:
|
||||
for u in G[v]:
|
||||
if distances[rightmatches[u]] is INFINITY:
|
||||
distances[rightmatches[u]] = distances[v] + 1
|
||||
queue.append(rightmatches[u])
|
||||
return distances[None] is not INFINITY
|
||||
|
||||
def depth_first_search(v):
|
||||
if v is not None:
|
||||
for u in G[v]:
|
||||
if distances[rightmatches[u]] == distances[v] + 1:
|
||||
if depth_first_search(rightmatches[u]):
|
||||
rightmatches[u] = v
|
||||
leftmatches[v] = u
|
||||
return True
|
||||
distances[v] = INFINITY
|
||||
return False
|
||||
return True
|
||||
|
||||
# Initialize the "global" variables that maintain state during the search.
|
||||
left, right = bipartite_sets(G, top_nodes)
|
||||
leftmatches = {v: None for v in left}
|
||||
rightmatches = {v: None for v in right}
|
||||
distances = {}
|
||||
queue = collections.deque()
|
||||
|
||||
# Implementation note: this counter is incremented as pairs are matched but
|
||||
# it is currently not used elsewhere in the computation.
|
||||
num_matched_pairs = 0
|
||||
while breadth_first_search():
|
||||
for v in left:
|
||||
if leftmatches[v] is None:
|
||||
if depth_first_search(v):
|
||||
num_matched_pairs += 1
|
||||
|
||||
# Strip the entries matched to `None`.
|
||||
leftmatches = {k: v for k, v in leftmatches.items() if v is not None}
|
||||
rightmatches = {k: v for k, v in rightmatches.items() if v is not None}
|
||||
|
||||
# At this point, the left matches and the right matches are inverses of one
|
||||
# another. In other words,
|
||||
#
|
||||
# leftmatches == {v: k for k, v in rightmatches.items()}
|
||||
#
|
||||
# Finally, we combine both the left matches and right matches.
|
||||
return dict(itertools.chain(leftmatches.items(), rightmatches.items()))
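# Usage sketch: a hedged example of supplying ``top_nodes`` for a disconnected
# bipartite graph, where the two-coloring would otherwise be ambiguous (see
# the Raises section above).  The helper name is hypothetical.
def _sketch_matching_with_top_nodes():
    import networkx as nx

    G = nx.Graph([(0, "a"), (1, "b")])  # two components
    matching = nx.bipartite.hopcroft_karp_matching(G, top_nodes=[0, 1])
    # Pairs 0 with "a" and 1 with "b", recorded in both directions.
    return matching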
|
||||
|
||||
|
||||
def eppstein_matching(G, top_nodes=None):
|
||||
"""Returns the maximum cardinality matching of the bipartite graph `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
top_nodes : container
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
matches : dictionary
|
||||
|
||||
The matching is returned as a dictionary, `matching`, such that
|
||||
``matching[v] == w`` if node `v` is matched to node `w`. Unmatched
|
||||
nodes do not occur as a key in `matching`.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is implemented with David Eppstein's version of the
|
||||
Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
|
||||
originally appeared in the `Python Algorithms and Data Structures library
|
||||
(PADS) <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>`_.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
|
||||
hopcroft_karp_matching
|
||||
|
||||
"""
|
||||
# Due to its original implementation, a directed graph is needed
|
||||
# so that the two sets of bipartite nodes can be distinguished
|
||||
left, right = bipartite_sets(G, top_nodes)
|
||||
G = nx.DiGraph(G.edges(left))
|
||||
# initialize greedy matching (redundant, but faster than full search)
|
||||
matching = {}
|
||||
for u in G:
|
||||
for v in G[u]:
|
||||
if v not in matching:
|
||||
matching[v] = u
|
||||
break
|
||||
while True:
|
||||
# structure residual graph into layers
|
||||
# pred[u] gives the neighbor in the previous layer for u in U
|
||||
# preds[v] gives a list of neighbors in the previous layer for v in V
|
||||
# unmatched gives a list of unmatched vertices in final layer of V,
|
||||
# and is also used as a flag value for pred[u] when u is in the first
|
||||
# layer
|
||||
preds = {}
|
||||
unmatched = []
|
||||
pred = {u: unmatched for u in G}
|
||||
for v in matching:
|
||||
del pred[matching[v]]
|
||||
layer = list(pred)
|
||||
|
||||
# repeatedly extend layering structure by another pair of layers
|
||||
while layer and not unmatched:
|
||||
newLayer = {}
|
||||
for u in layer:
|
||||
for v in G[u]:
|
||||
if v not in preds:
|
||||
newLayer.setdefault(v, []).append(u)
|
||||
layer = []
|
||||
for v in newLayer:
|
||||
preds[v] = newLayer[v]
|
||||
if v in matching:
|
||||
layer.append(matching[v])
|
||||
pred[matching[v]] = v
|
||||
else:
|
||||
unmatched.append(v)
|
||||
|
||||
# did we finish layering without finding any alternating paths?
|
||||
if not unmatched:
|
||||
unlayered = {}
|
||||
for u in G:
|
||||
# TODO Why is extra inner loop necessary?
|
||||
for v in G[u]:
|
||||
if v not in preds:
|
||||
unlayered[v] = None
|
||||
# TODO Originally, this function returned a three-tuple:
|
||||
#
|
||||
# return (matching, list(pred), list(unlayered))
|
||||
#
|
||||
# For some reason, the documentation for this function
|
||||
# indicated that the second and third elements of the returned
|
||||
# three-tuple would be the vertices in the left and right vertex
|
||||
# sets, respectively, that are also in the maximum independent set.
|
||||
# However, what I think the author meant was that the second
|
||||
# element is the list of vertices that were unmatched and the third
|
||||
# element was the list of vertices that were matched. Since that
|
||||
# seems to be the case, they don't really need to be returned,
|
||||
# since that information can be inferred from the matching
|
||||
# dictionary.
|
||||
|
||||
# All the matched nodes must be a key in the dictionary
|
||||
for key in matching.copy():
|
||||
matching[matching[key]] = key
|
||||
return matching
|
||||
|
||||
# recursively search backward through layers to find alternating paths
|
||||
# recursion returns true if found path, false otherwise
|
||||
def recurse(v):
|
||||
if v in preds:
|
||||
L = preds.pop(v)
|
||||
for u in L:
|
||||
if u in pred:
|
||||
pu = pred.pop(u)
|
||||
if pu is unmatched or recurse(pu):
|
||||
matching[v] = u
|
||||
return True
|
||||
return False
|
||||
|
||||
for v in unmatched:
|
||||
recurse(v)
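# Usage sketch: a hedged comparison of the two maximum-matching
# implementations; the matched pairs may differ, but the cardinalities agree.
# The helper name is hypothetical.
def _sketch_compare_matchings():
    import networkx as nx

    G = nx.complete_bipartite_graph(3, 4)
    hk = nx.bipartite.hopcroft_karp_matching(G)
    ep = nx.bipartite.eppstein_matching(G)
    return len(hk) == len(ep)  # True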
|
||||
|
||||
|
||||
def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
|
||||
"""Returns True if and only if the vertex `v` is connected to one of
|
||||
the target vertices by an alternating path in `G`.
|
||||
|
||||
An *alternating path* is a path in which every other edge is in the
|
||||
specified maximum matching (and the remaining edges in the path are not in
|
||||
the matching). An alternating path may have matched edges in the even
|
||||
positions or in the odd positions, as long as the edges alternate between
|
||||
'matched' and 'unmatched'.
|
||||
|
||||
`G` is an undirected bipartite NetworkX graph.
|
||||
|
||||
`v` is a vertex in `G`.
|
||||
|
||||
`matched_edges` is a set of edges present in a maximum matching in `G`.
|
||||
|
||||
`unmatched_edges` is a set of edges not present in a maximum
|
||||
matching in `G`.
|
||||
|
||||
`targets` is a set of vertices.
|
||||
|
||||
"""
|
||||
|
||||
def _alternating_dfs(u, along_matched=True):
|
||||
"""Returns True if and only if `u` is connected to one of the
|
||||
targets by an alternating path.
|
||||
|
||||
`u` is a vertex in the graph `G`.
|
||||
|
||||
If `along_matched` is True, this step of the depth-first search
|
||||
will continue only through edges in the given matching. Otherwise, it
|
||||
will continue only through edges *not* in the given matching.
|
||||
|
||||
"""
|
||||
if along_matched:
|
||||
edges = itertools.cycle([matched_edges, unmatched_edges])
|
||||
else:
|
||||
edges = itertools.cycle([unmatched_edges, matched_edges])
|
||||
visited = set()
|
||||
stack = [(u, iter(G[u]), next(edges))]
|
||||
while stack:
|
||||
parent, children, valid_edges = stack[-1]
|
||||
try:
|
||||
child = next(children)
|
||||
if child not in visited:
|
||||
if (parent, child) in valid_edges or (child, parent) in valid_edges:
|
||||
if child in targets:
|
||||
return True
|
||||
visited.add(child)
|
||||
stack.append((child, iter(G[child]), next(edges)))
|
||||
except StopIteration:
|
||||
stack.pop()
|
||||
return False
|
||||
|
||||
# Check for alternating paths starting with edges in the matching, then
|
||||
# check for alternating paths starting with edges not in the
|
||||
# matching.
|
||||
return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
|
||||
v, along_matched=False
|
||||
)
|
||||
|
||||
|
||||
def _connected_by_alternating_paths(G, matching, targets):
|
||||
"""Returns the set of vertices that are connected to one of the target
|
||||
vertices by an alternating path in `G` or are themselves a target.
|
||||
|
||||
An *alternating path* is a path in which every other edge is in the
|
||||
specified maximum matching (and the remaining edges in the path are not in
|
||||
the matching). An alternating path may have matched edges in the even
|
||||
positions or in the odd positions, as long as the edges alternate between
|
||||
'matched' and 'unmatched'.
|
||||
|
||||
`G` is an undirected bipartite NetworkX graph.
|
||||
|
||||
`matching` is a dictionary representing a maximum matching in `G`, as
|
||||
returned by, for example, :func:`maximum_matching`.
|
||||
|
||||
`targets` is a set of vertices.
|
||||
|
||||
"""
|
||||
# Get the set of matched edges and the set of unmatched edges. Only include
|
||||
# one version of each undirected edge (for example, include edge (1, 2) but
|
||||
# not edge (2, 1)). Using frozensets as an intermediary step we do not
|
||||
# require nodes to be orderable.
|
||||
edge_sets = {frozenset((u, v)) for u, v in matching.items()}
|
||||
matched_edges = {tuple(edge) for edge in edge_sets}
|
||||
unmatched_edges = {
|
||||
(u, v) for (u, v) in G.edges() if frozenset((u, v)) not in edge_sets
|
||||
}
|
||||
|
||||
return {
|
||||
v
|
||||
for v in G
|
||||
if v in targets
|
||||
or _is_connected_by_alternating_path(
|
||||
G, v, matched_edges, unmatched_edges, targets
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def to_vertex_cover(G, matching, top_nodes=None):
|
||||
"""Returns the minimum vertex cover corresponding to the given maximum
|
||||
matching of the bipartite graph `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
matching : dictionary
|
||||
|
||||
A dictionary whose keys are vertices in `G` and whose values are the
|
||||
distinct neighbors comprising the maximum matching for `G`, as returned
|
||||
by, for example, :func:`maximum_matching`. The dictionary *must*
|
||||
represent the maximum matching.
|
||||
|
||||
top_nodes : container
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
vertex_cover : :class:`set`
|
||||
|
||||
The minimum vertex cover in `G`.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is implemented using the procedure guaranteed by `Konig's
|
||||
theorem
|
||||
<https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
|
||||
which proves an equivalence between a maximum matching and a minimum vertex
|
||||
cover in bipartite graphs.
|
||||
|
||||
Since a minimum vertex cover is the complement of a maximum independent set
|
||||
for any graph, one can compute the maximum independent set of a bipartite
|
||||
graph this way:
|
||||
|
||||
>>> G = nx.complete_bipartite_graph(2, 3)
|
||||
>>> matching = nx.bipartite.maximum_matching(G)
|
||||
>>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
|
||||
>>> independent_set = set(G) - vertex_cover
|
||||
>>> print(list(independent_set))
|
||||
[2, 3, 4]
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
"""
|
||||
# This is a Python implementation of the algorithm described at
|
||||
# <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29#Proof>.
|
||||
L, R = bipartite_sets(G, top_nodes)
|
||||
# Let U be the set of unmatched vertices in the left vertex set.
|
||||
unmatched_vertices = set(G) - set(matching)
|
||||
U = unmatched_vertices & L
|
||||
# Let Z be the set of vertices that are either in U or are connected to U
|
||||
# by alternating paths.
|
||||
Z = _connected_by_alternating_paths(G, matching, U)
|
||||
# At this point, every edge either has a right endpoint in Z or a left
|
||||
# endpoint not in Z. This gives us the vertex cover.
|
||||
return (L - Z) | (R & Z)
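# Usage sketch: a hedged check that the cover returned above touches every
# edge, which is the defining property of a vertex cover.  The helper name is
# hypothetical.
def _sketch_vertex_cover_check():
    import networkx as nx

    G = nx.complete_bipartite_graph(2, 3)
    matching = nx.bipartite.maximum_matching(G)
    cover = nx.bipartite.to_vertex_cover(G, matching)
    return all(u in cover or v in cover for u, v in G.edges())  # True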
|
||||
|
||||
|
||||
#: Returns the maximum cardinality matching in the given bipartite graph.
|
||||
#:
|
||||
#: This function is simply an alias for :func:`hopcroft_karp_matching`.
|
||||
maximum_matching = hopcroft_karp_matching
|
||||
|
||||
|
||||
def minimum_weight_full_matching(G, top_nodes=None, weight="weight"):
|
||||
r"""Returns a minimum weight full matching of the bipartite graph `G`.
|
||||
|
||||
Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights
|
||||
:math:`w : E \to \mathbb{R}`. This function then produces a matching
|
||||
:math:`M \subseteq E` with cardinality
|
||||
|
||||
.. math::
|
||||
\lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert),
|
||||
|
||||
which minimizes the sum of the weights of the edges included in the
|
||||
matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such
|
||||
matching exists.
|
||||
|
||||
When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly
|
||||
referred to as a perfect matching; here, since we allow
|
||||
:math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we
|
||||
follow Karp [1]_ and refer to the matching as *full*.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
top_nodes : container
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed.
|
||||
|
||||
weight : string, optional (default='weight')
|
||||
|
||||
The edge data key used to provide each value in the matrix.
|
||||
|
||||
Returns
|
||||
-------
|
||||
matches : dictionary
|
||||
|
||||
The matching is returned as a dictionary, `matches`, such that
|
||||
``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
|
||||
nodes do not occur as a key in `matches`.
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
Raised if no full matching exists.
|
||||
|
||||
ImportError
|
||||
Raised if SciPy is not available.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The problem of determining a minimum weight full matching is also known as
|
||||
the rectangular linear assignment problem. This implementation defers the
|
||||
calculation of the assignment to SciPy.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Richard Manning Karp:
|
||||
An Algorithm to Solve the m x n Assignment Problem in Expected Time
|
||||
O(mn log n).
|
||||
Networks, 10(2):143–152, 1980.
|
||||
|
||||
"""
|
||||
try:
|
||||
import numpy as np
|
||||
import scipy.optimize
|
||||
except ImportError as e:
|
||||
raise ImportError(
|
||||
"minimum_weight_full_matching requires SciPy: " + "https://scipy.org/"
|
||||
) from e
|
||||
left, right = nx.bipartite.sets(G, top_nodes)
|
||||
U = list(left)
|
||||
V = list(right)
|
||||
# We explicitly create the biadjacency matrix having infinities
|
||||
# where edges are missing (as opposed to zeros, which is what one would
|
||||
# get by using toarray on the sparse matrix).
|
||||
weights_sparse = biadjacency_matrix(
|
||||
G, row_order=U, column_order=V, weight=weight, format="coo"
|
||||
)
|
||||
weights = np.full(weights_sparse.shape, np.inf)
|
||||
weights[weights_sparse.row, weights_sparse.col] = weights_sparse.data
|
||||
left_matches = scipy.optimize.linear_sum_assignment(weights)
|
||||
d = {U[u]: V[v] for u, v in zip(*left_matches)}
|
||||
# d will contain the matching from edges in left to right; we need to
|
||||
# add the ones from right to left as well.
|
||||
d.update({v: u for u, v in d.items()})
|
||||
return d
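# Usage sketch: a hedged example on a tiny weighted instance; SciPy must be
# installed, as noted above.  The helper name is hypothetical.
def _sketch_min_weight_full_matching():
    import networkx as nx

    G = nx.Graph()
    G.add_edge(0, "x", weight=1)
    G.add_edge(0, "y", weight=4)
    G.add_edge(1, "x", weight=4)
    G.add_edge(1, "y", weight=1)
    # The cheaper pairing 0-"x", 1-"y" is chosen; both directions are returned.
    return nx.bipartite.minimum_weight_full_matching(G)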
|
166
venv/Lib/site-packages/networkx/algorithms/bipartite/matrix.py
Normal file
|
@@ -0,0 +1,166 @@
|
|||
"""
|
||||
====================
|
||||
Biadjacency matrices
|
||||
====================
|
||||
"""
|
||||
import itertools
|
||||
from networkx.convert_matrix import _generate_weighted_edges
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"]
|
||||
|
||||
|
||||
def biadjacency_matrix(
|
||||
G, row_order, column_order=None, dtype=None, weight="weight", format="csr"
|
||||
):
|
||||
r"""Returns the biadjacency matrix of the bipartite graph G.
|
||||
|
||||
Let `G = (U, V, E)` be a bipartite graph with node sets
|
||||
`U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency
|
||||
matrix [1]_ is the `r` x `s` matrix `B` in which `b_{i,j} = 1`
|
||||
if, and only if, `(u_i, v_j) \in E`. If the parameter `weight` is
|
||||
not `None` and matches the name of an edge attribute, its value is
|
||||
used instead of 1.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
row_order : list of nodes
|
||||
The rows of the matrix are ordered according to the list of nodes.
|
||||
|
||||
column_order : list, optional
|
||||
The columns of the matrix are ordered according to the list of nodes.
|
||||
If column_order is None, then the ordering of columns is arbitrary.
|
||||
|
||||
dtype : NumPy data-type, optional
|
||||
A valid NumPy dtype used to initialize the array. If None, then the
|
||||
NumPy default is used.
|
||||
|
||||
weight : string or None, optional (default='weight')
|
||||
The edge data key used to provide each value in the matrix.
|
||||
If None, then each edge has weight 1.
|
||||
|
||||
format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
|
||||
The type of the matrix to be returned (default 'csr'). For
|
||||
some algorithms different implementations of sparse matrices
|
||||
can perform better. See [2]_ for details.
|
||||
|
||||
Returns
|
||||
-------
|
||||
M : SciPy sparse matrix
|
||||
Biadjacency matrix representation of the bipartite graph G.
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to check that the input graph is bipartite.
|
||||
|
||||
For directed bipartite graphs only successors are considered as neighbors.
|
||||
To obtain an adjacency matrix with ones (or weight values) for both
|
||||
predecessors and successors you have to generate two biadjacency matrices
|
||||
where the rows of one of them are the columns of the other, and then add
|
||||
one of them to the transpose of the other.
|
||||
|
||||
See Also
|
||||
--------
|
||||
adjacency_matrix
|
||||
from_biadjacency_matrix
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
|
||||
.. [2] Scipy Dev. References, "Sparse Matrices",
|
||||
https://docs.scipy.org/doc/scipy/reference/sparse.html
|
||||
"""
|
||||
from scipy import sparse
|
||||
|
||||
nlen = len(row_order)
|
||||
if nlen == 0:
|
||||
raise nx.NetworkXError("row_order is empty list")
|
||||
if len(row_order) != len(set(row_order)):
|
||||
msg = "Ambiguous ordering: `row_order` contained duplicates."
|
||||
raise nx.NetworkXError(msg)
|
||||
if column_order is None:
|
||||
column_order = list(set(G) - set(row_order))
|
||||
mlen = len(column_order)
|
||||
if len(column_order) != len(set(column_order)):
|
||||
msg = "Ambiguous ordering: `column_order` contained duplicates."
|
||||
raise nx.NetworkXError(msg)
|
||||
|
||||
row_index = dict(zip(row_order, itertools.count()))
|
||||
col_index = dict(zip(column_order, itertools.count()))
|
||||
|
||||
if G.number_of_edges() == 0:
|
||||
row, col, data = [], [], []
|
||||
else:
|
||||
row, col, data = zip(
|
||||
*(
|
||||
(row_index[u], col_index[v], d.get(weight, 1))
|
||||
for u, v, d in G.edges(row_order, data=True)
|
||||
if u in row_index and v in col_index
|
||||
)
|
||||
)
|
||||
M = sparse.coo_matrix((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
|
||||
try:
|
||||
return M.asformat(format)
|
||||
# From Scipy 1.1.0, asformat will throw a ValueError instead of an
|
||||
# AttributeError if the format is not recognized.
|
||||
except (AttributeError, ValueError) as e:
|
||||
raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from e
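# Usage sketch: a hedged example with explicit row and column orders so the
# matrix layout is deterministic; a path on four nodes has parts {0, 2} and
# {1, 3}.  The helper name is hypothetical.
def _sketch_biadjacency_matrix():
    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.path_graph(4)
    M = bipartite.biadjacency_matrix(G, row_order=[0, 2], column_order=[1, 3])
    return M.toarray()  # [[1, 0], [1, 1]]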
|
||||
|
||||
|
||||
def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"):
|
||||
r"""Creates a new bipartite graph from a biadjacency matrix given as a
|
||||
SciPy sparse matrix.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
A: scipy sparse matrix
|
||||
A biadjacency matrix representation of a graph
|
||||
|
||||
create_using: NetworkX graph
|
||||
Use specified graph for result. The default is Graph()
|
||||
|
||||
edge_attribute: string
|
||||
Name of edge attribute to store matrix numeric value. The data will
|
||||
have the same type as the matrix entry (int, float, (real,imag)).
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes are labeled with the attribute `bipartite` set to an integer
|
||||
0 or 1 representing membership in part 0 or part 1 of the bipartite graph.
|
||||
|
||||
If `create_using` is an instance of :class:`networkx.MultiGraph` or
|
||||
:class:`networkx.MultiDiGraph` and the entries of `A` are of
|
||||
type :class:`int`, then this function returns a multigraph (of the same
|
||||
type as `create_using`) with parallel edges. In this case, `edge_attribute`
|
||||
will be ignored.
|
||||
|
||||
See Also
|
||||
--------
|
||||
biadjacency_matrix
|
||||
from_numpy_array
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using)
|
||||
n, m = A.shape
|
||||
# Make sure we get even the isolated nodes of the graph.
|
||||
G.add_nodes_from(range(n), bipartite=0)
|
||||
G.add_nodes_from(range(n, n + m), bipartite=1)
|
||||
# Create an iterable over (u, v, w) triples and for each triple, add an
|
||||
# edge from u to v with weight w.
|
||||
triples = ((u, n + v, d) for (u, v, d) in _generate_weighted_edges(A))
|
||||
# If the entries in the adjacency matrix are integers and the graph is a
|
||||
# multigraph, then create parallel edges, each with weight 1, for each
|
||||
# entry in the adjacency matrix. Otherwise, create one edge for each
|
||||
# positive entry in the adjacency matrix and set the weight of that edge to
|
||||
# be the entry in the matrix.
|
||||
if A.dtype.kind in ("i", "u") and G.is_multigraph():
|
||||
chain = itertools.chain.from_iterable
|
||||
triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
|
||||
G.add_weighted_edges_from(triples, weight=edge_attribute)
|
||||
return G
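# Usage sketch: a hedged round trip through the two functions above; column
# nodes come back relabeled as n, n + 1, ... as described in the docstring.
# The helper name is hypothetical.
def _sketch_biadjacency_round_trip():
    import networkx as nx
    from networkx.algorithms import bipartite

    B = nx.path_graph(4)
    M = bipartite.biadjacency_matrix(B, row_order=[0, 2], column_order=[1, 3])
    H = bipartite.from_biadjacency_matrix(M)
    return sorted(H.edges())  # [(0, 2), (1, 2), (1, 3)]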
|
|
@@ -0,0 +1,516 @@
|
|||
"""One-mode (unipartite) projections of bipartite graphs."""
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = [
|
||||
"project",
|
||||
"projected_graph",
|
||||
"weighted_projected_graph",
|
||||
"collaboration_weighted_projected_graph",
|
||||
"overlap_weighted_projected_graph",
|
||||
"generic_weighted_projected_graph",
|
||||
]
|
||||
|
||||
|
||||
def projected_graph(B, nodes, multigraph=False):
|
||||
r"""Returns the projection of B onto one of its node sets.
|
||||
|
||||
Returns the graph G that is the projection of the bipartite graph B
|
||||
onto the specified nodes. They retain their attributes and are connected
|
||||
in G if they have a common neighbor in B.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
multigraph: bool (default=False)
|
||||
If True return a multigraph where the multiple edges represent multiple
|
||||
shared neighbors. The edge key in the multigraph is assigned to the
|
||||
label of the neighbor.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph or multigraph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(4)
|
||||
>>> G = bipartite.projected_graph(B, [1, 3])
|
||||
>>> list(G)
|
||||
[1, 3]
|
||||
>>> list(G.edges())
|
||||
[(1, 3)]
|
||||
|
||||
If nodes `a`, and `b` are connected through both nodes 1 and 2 then
|
||||
building a multigraph results in two edges in the projection onto
|
||||
[`a`, `b`]:
|
||||
|
||||
>>> B = nx.Graph()
|
||||
>>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)])
|
||||
>>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True)
|
||||
>>> print([sorted((u, v)) for u, v in G.edges()])
|
||||
[['a', 'b'], ['a', 'b']]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
Returns a simple graph that is the projection of the bipartite graph B
|
||||
onto the set of nodes given in list nodes. If multigraph=True then
|
||||
a multigraph is returned with an edge for every shared neighbor.
|
||||
|
||||
Directed graphs are allowed as input. The output will also then
|
||||
be a directed graph with edges if there is a directed path between
|
||||
the nodes.
|
||||
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
collaboration_weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
generic_weighted_projected_graph
|
||||
"""
|
||||
if B.is_multigraph():
|
||||
raise nx.NetworkXError("not defined for multigraphs")
|
||||
if B.is_directed():
|
||||
directed = True
|
||||
if multigraph:
|
||||
G = nx.MultiDiGraph()
|
||||
else:
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
directed = False
|
||||
if multigraph:
|
||||
G = nx.MultiGraph()
|
||||
else:
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
nbrs2 = {v for nbr in B[u] for v in B[nbr] if v != u}
|
||||
if multigraph:
|
||||
for n in nbrs2:
|
||||
if directed:
|
||||
links = set(B[u]) & set(B.pred[n])
|
||||
else:
|
||||
links = set(B[u]) & set(B[n])
|
||||
for l in links:
|
||||
if not G.has_edge(u, n, l):
|
||||
G.add_edge(u, n, key=l)
|
||||
else:
|
||||
G.add_edges_from((u, n) for n in nbrs2)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
def weighted_projected_graph(B, nodes, ratio=False):
|
||||
r"""Returns a weighted projection of B onto one of its node sets.
|
||||
|
||||
The weighted projected graph is the projection of the bipartite
|
||||
network B onto the specified nodes with weights representing the
|
||||
number of shared neighbors or the ratio between actual shared
|
||||
neighbors and possible shared neighbors if ``ratio is True`` [1]_.
|
||||
The nodes retain their attributes and are connected in the resulting
|
||||
graph if they have an edge to a common node in the original graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
ratio: Bool (default=False)
|
||||
If True, edge weight is the ratio between actual shared neighbors
|
||||
and maximum possible shared neighbors (i.e., the size of the other
|
||||
node set). If False, edge weight is the number of shared neighbors.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(4)
|
||||
>>> G = bipartite.weighted_projected_graph(B, [1, 3])
|
||||
>>> list(G)
|
||||
[1, 3]
|
||||
>>> list(G.edges(data=True))
|
||||
[(1, 3, {'weight': 1})]
|
||||
>>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
|
||||
>>> list(G.edges(data=True))
|
||||
[(1, 3, {'weight': 0.5})]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
collaboration_weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
generic_weighted_projected_graph
|
||||
projected_graph
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
n_top = float(len(B) - len(nodes))
|
||||
for u in nodes:
|
||||
unbrs = set(B[u])
|
||||
nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u}
|
||||
for v in nbrs2:
|
||||
vnbrs = set(pred[v])
|
||||
common = unbrs & vnbrs
|
||||
if not ratio:
|
||||
weight = len(common)
|
||||
else:
|
||||
weight = len(common) / n_top
|
||||
G.add_edge(u, v, weight=weight)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
def collaboration_weighted_projected_graph(B, nodes):
|
||||
r"""Newman's weighted projection of B onto one of its node sets.
|
||||
|
||||
The collaboration weighted projection is the projection of the
|
||||
bipartite network B onto the specified nodes with weights assigned
|
||||
using Newman's collaboration model [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}
|
||||
|
||||
where `u` and `v` are nodes from the bottom bipartite node set,
|
||||
and `k` is a node of the top node set.
|
||||
The value `d_k` is the degree of node `k` in the bipartite
|
||||
network and `\delta_{u}^{k}` is 1 if node `u` is
|
||||
linked to node `k` in the original bipartite graph or 0 otherwise.
|
||||
|
||||
The nodes retain their attributes and are connected in the resulting
|
||||
graph if they have an edge to a common node in the original bipartite
|
||||
graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(5)
|
||||
>>> B.add_edge(1, 5)
|
||||
>>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
|
||||
>>> list(G)
|
||||
[0, 2, 4, 5]
|
||||
>>> for edge in sorted(G.edges(data=True)):
|
||||
... print(edge)
|
||||
...
|
||||
(0, 2, {'weight': 0.5})
|
||||
(0, 5, {'weight': 0.5})
|
||||
(2, 4, {'weight': 1.0})
|
||||
(2, 5, {'weight': 0.5})
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
generic_weighted_projected_graph,
|
||||
projected_graph
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Scientific collaboration networks: II.
|
||||
Shortest paths, weighted networks, and centrality,
|
||||
M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
unbrs = set(B[u])
|
||||
nbrs2 = {n for nbr in unbrs for n in B[nbr] if n != u}
|
||||
for v in nbrs2:
|
||||
vnbrs = set(pred[v])
|
||||
common_degree = (len(B[n]) for n in unbrs & vnbrs)
|
||||
weight = sum(1.0 / (deg - 1) for deg in common_degree if deg > 1)
|
||||
G.add_edge(u, v, weight=weight)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
def overlap_weighted_projected_graph(B, nodes, jaccard=True):
|
||||
r"""Overlap weighted projection of B onto one of its node sets.
|
||||
|
||||
The overlap weighted projection is the projection of the bipartite
|
||||
network B onto the specified nodes with weights representing
|
||||
the Jaccard index between the neighborhoods of the two nodes in the
|
||||
original bipartite network [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}
|
||||
|
||||
or if the parameter 'jaccard' is False, the fraction of common
|
||||
neighbors divided by the minimum degree of the two nodes in the original
|
||||
bipartite graph [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
w_{v, u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|, |N(v)|)}
|
||||
|
||||
The nodes retain their attributes and are connected in the resulting
|
||||
graph if they have an edge to a common node in the original bipartite graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
jaccard: Bool (default=True)
If True, edge weights are Jaccard indices; if False, they are overlap
coefficients (shared neighbors divided by the size of the smaller
neighborhood).
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(5)
|
||||
>>> nodes = [0, 2, 4]
|
||||
>>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
|
||||
>>> list(G)
|
||||
[0, 2, 4]
|
||||
>>> list(G.edges(data=True))
|
||||
[(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
|
||||
>>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
|
||||
>>> list(G.edges(data=True))
|
||||
[(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
collaboration_weighted_projected_graph,
|
||||
generic_weighted_projected_graph,
|
||||
projected_graph
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
|
||||
Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
unbrs = set(B[u])
|
||||
nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u}
|
||||
for v in nbrs2:
|
||||
vnbrs = set(pred[v])
|
||||
if jaccard:
|
||||
wt = float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
|
||||
else:
|
||||
wt = float(len(unbrs & vnbrs)) / min(len(unbrs), len(vnbrs))
|
||||
G.add_edge(u, v, weight=wt)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
def generic_weighted_projected_graph(B, nodes, weight_function=None):
|
||||
r"""Weighted projection of B with a user-specified weight function.
|
||||
|
||||
The bipartite network B is projected on to the specified nodes
|
||||
with weights computed by a user-specified function. This function
|
||||
must accept as a parameter the neighborhood sets of two nodes and
|
||||
return an integer or a float.
|
||||
|
||||
The nodes retain their attributes and are connected in the resulting graph
|
||||
if they have an edge to a common node in the original graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
weight_function : function
|
||||
This function must accept as parameters the same input graph
|
||||
that this function receives, and two nodes, and return an integer or a float.
|
||||
The default function computes the number of shared neighbors.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> # Define some custom weight functions
|
||||
>>> def jaccard(G, u, v):
|
||||
... unbrs = set(G[u])
|
||||
... vnbrs = set(G[v])
|
||||
... return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
|
||||
...
|
||||
>>> def my_weight(G, u, v, weight="weight"):
|
||||
... w = 0
|
||||
... for nbr in set(G[u]) & set(G[v]):
|
||||
... w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1)
|
||||
... return w
|
||||
...
|
||||
>>> # A complete bipartite graph with 4 nodes and 4 edges
|
||||
>>> B = nx.complete_bipartite_graph(2, 2)
|
||||
>>> # Add some arbitrary weight to the edges
|
||||
>>> for i, (u, v) in enumerate(B.edges()):
|
||||
... B.edges[u, v]["weight"] = i + 1
|
||||
...
|
||||
>>> for edge in B.edges(data=True):
|
||||
... print(edge)
|
||||
...
|
||||
(0, 2, {'weight': 1})
|
||||
(0, 3, {'weight': 2})
|
||||
(1, 2, {'weight': 3})
|
||||
(1, 3, {'weight': 4})
|
||||
>>> # By default, the weight is the number of shared neighbors
|
||||
>>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
|
||||
>>> print(list(G.edges(data=True)))
|
||||
[(0, 1, {'weight': 2})]
|
||||
>>> # To specify a custom weight function use the weight_function parameter
|
||||
>>> G = bipartite.generic_weighted_projected_graph(
|
||||
... B, [0, 1], weight_function=jaccard
|
||||
... )
|
||||
>>> print(list(G.edges(data=True)))
|
||||
[(0, 1, {'weight': 1.0})]
|
||||
>>> G = bipartite.generic_weighted_projected_graph(
|
||||
... B, [0, 1], weight_function=my_weight
|
||||
... )
|
||||
>>> print(list(G.edges(data=True)))
|
||||
[(0, 1, {'weight': 10})]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
collaboration_weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
projected_graph
|
||||
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
if weight_function is None:
|
||||
|
||||
def weight_function(G, u, v):
|
||||
# Notice that we use set(pred[v]) for handling the directed case.
|
||||
return len(set(G[u]) & set(pred[v]))
|
||||
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
nbrs2 = {n for nbr in set(B[u]) for n in B[nbr]} - {u}
|
||||
for v in nbrs2:
|
||||
weight = weight_function(B, u, v)
|
||||
G.add_edge(u, v, weight=weight)
|
||||
return G
|
||||
|
||||
|
||||
def project(B, nodes, create_using=None):
|
||||
return projected_graph(B, nodes)
|
|
@@ -0,0 +1,111 @@
"""Node redundancy for bipartite graphs."""
from itertools import combinations

from networkx import NetworkXError

__all__ = ["node_redundancy"]


def node_redundancy(G, nodes=None):
    r"""Computes the node redundancy coefficients for the nodes in the bipartite
    graph `G`.

    The redundancy coefficient of a node `v` is the fraction of pairs of
    neighbors of `v` that are both linked to other nodes. In a one-mode
    projection these nodes would be linked together even if `v` were
    not there.

    More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
    defined by

    .. math::

        rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
        \: \exists v' \neq v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},

    where `N(v)` is the set of neighbors of `v` in `G`.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    Compute the redundancy coefficient of each node in a graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> rc[0]
        1.0

    Compute the average redundancy for the graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> sum(rc.values()) / len(G)
        1.0

    Compute the average redundancy for a set of nodes::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> nodes = [0, 2]
        >>> sum(rc[n] for n in nodes) / len(nodes)
        1.0

    Raises
    ------
    NetworkXError
        If any of the nodes in the graph (or in `nodes`, if specified) has
        (out-)degree less than two (which would result in division by zero,
        according to the definition of the redundancy coefficient).

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.

    """
    if nodes is None:
        nodes = G
    if any(len(G[v]) < 2 for v in nodes):
        raise NetworkXError(
            "Cannot compute redundancy coefficient for a node"
            " that has fewer than two neighbors."
        )
    # TODO This can be trivially parallelized.
    return {v: _node_redundancy(G, v) for v in nodes}


def _node_redundancy(G, v):
    """Returns the redundancy of the node `v` in the bipartite graph `G`.

    If `G` is a graph with `n` nodes, the redundancy of a node is the ratio
    of the "overlap" of `v` to the maximum possible overlap of `v`
    according to its degree. The overlap of `v` is the number of pairs of
    neighbors that have mutual neighbors themselves, other than `v`.

    `v` must have at least two neighbors in `G`.

    """
    n = len(G[v])
    # TODO On Python 3, we could just use `G[u].keys() & G[w].keys()` instead
    # of instantiating the entire sets.
    overlap = sum(
        1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
    )
    return (2 * overlap) / (n * (n - 1))
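For a concrete reading of the formula, the coefficient of a single node can be recomputed by hand and checked against `node_redundancy`. A minimal sketch using the 4-cycle from the docstring examples (the names `nbrs`, `overlap`, and `manual_rc` are illustrative):

from itertools import combinations

import networkx as nx
from networkx.algorithms import bipartite

G = nx.cycle_graph(4)  # bipartite with node sets {0, 2} and {1, 3}
v = 0
nbrs = set(G[v])  # N(v) = {1, 3}
# Numerator: neighbor pairs {u, w} that share some neighbor other than v.
overlap = sum(1 for u, w in combinations(nbrs, 2) if (set(G[u]) & set(G[w])) - {v})
manual_rc = overlap / (len(nbrs) * (len(nbrs) - 1) / 2)  # 1 / 1 = 1.0
assert manual_rc == bipartite.node_redundancy(G)[v]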
@ -0,0 +1,71 @@
"""
Spectral bipartivity measure.
"""
import networkx as nx

__all__ = ["spectral_bipartivity"]


def spectral_bipartivity(G, nodes=None, weight="weight"):
    """Returns the spectral bipartivity.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or container, optional (default is all nodes)
        Nodes to return value of spectral bipartivity contribution.

    weight : string or None, optional (default = 'weight')
        Edge data key to use for edge weights. If None, weights set to 1.

    Returns
    -------
    sb : float or dict
        A single number if the keyword nodes is not specified, or
        a dictionary keyed by node with the spectral bipartivity contribution
        of that node as the value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> bipartite.spectral_bipartivity(G)
    1.0

    Notes
    -----
    This implementation uses Numpy (dense) matrices which are not efficient
    for storing large sparse graphs.

    See Also
    --------
    color

    References
    ----------
    .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
       bipartivity in complex networks", PhysRev E 72, 046105 (2005)
    """
    try:
        import scipy.linalg
    except ImportError as e:
        raise ImportError(
            "spectral_bipartivity() requires SciPy: http://scipy.org/"
        ) from e
    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist, weight=weight)
    expA = scipy.linalg.expm(A)
    expmA = scipy.linalg.expm(-A)
    coshA = 0.5 * (expA + expmA)
    if nodes is None:
        # return single number for entire graph
        return coshA.diagonal().sum() / expA.diagonal().sum()
    else:
        # contribution for individual nodes
        index = dict(zip(nodelist, range(len(nodelist))))
        sb = {}
        for n in nodes:
            i = index[n]
            sb[n] = coshA[i, i] / expA[i, i]
        return sb
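As a quick usage sketch (not part of the module, and assuming SciPy is installed): a bipartite graph scores exactly 1.0, a graph containing odd cycles scores strictly below 1, and passing `nodes` returns the per-node contributions instead of a single number.

import networkx as nx
from networkx.algorithms import bipartite

print(bipartite.spectral_bipartivity(nx.path_graph(4)))  # 1.0, paths are bipartite
print(bipartite.spectral_bipartivity(nx.complete_graph(3)))  # < 1.0, triangles are not
print(bipartite.spectral_bipartivity(nx.path_graph(4), nodes=[0, 1]))  # per-node dict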
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,121 @@
|
|||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
|
||||
|
||||
class TestBipartiteBasic:
|
||||
def test_is_bipartite(self):
|
||||
assert bipartite.is_bipartite(nx.path_graph(4))
|
||||
assert bipartite.is_bipartite(nx.DiGraph([(1, 0)]))
|
||||
assert not bipartite.is_bipartite(nx.complete_graph(3))
|
||||
|
||||
def test_bipartite_color(self):
|
||||
G = nx.path_graph(4)
|
||||
c = bipartite.color(G)
|
||||
assert c == {0: 1, 1: 0, 2: 1, 3: 0}
|
||||
|
||||
def test_not_bipartite_color(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
c = bipartite.color(nx.complete_graph(4))
|
||||
|
||||
def test_bipartite_directed(self):
|
||||
G = bipartite.random_graph(10, 10, 0.1, directed=True)
|
||||
assert bipartite.is_bipartite(G)
|
||||
|
||||
def test_bipartite_sets(self):
|
||||
G = nx.path_graph(4)
|
||||
X, Y = bipartite.sets(G)
|
||||
assert X == {0, 2}
|
||||
assert Y == {1, 3}
|
||||
|
||||
def test_bipartite_sets_directed(self):
|
||||
G = nx.path_graph(4)
|
||||
D = G.to_directed()
|
||||
X, Y = bipartite.sets(D)
|
||||
assert X == {0, 2}
|
||||
assert Y == {1, 3}
|
||||
|
||||
def test_bipartite_sets_given_top_nodes(self):
|
||||
G = nx.path_graph(4)
|
||||
top_nodes = [0, 2]
|
||||
X, Y = bipartite.sets(G, top_nodes)
|
||||
assert X == {0, 2}
|
||||
assert Y == {1, 3}
|
||||
|
||||
def test_bipartite_sets_disconnected(self):
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
G = nx.path_graph(4)
|
||||
G.add_edges_from([(5, 6), (6, 7)])
|
||||
X, Y = bipartite.sets(G)
|
||||
|
||||
def test_is_bipartite_node_set(self):
|
||||
G = nx.path_graph(4)
|
||||
assert bipartite.is_bipartite_node_set(G, [0, 2])
|
||||
assert bipartite.is_bipartite_node_set(G, [1, 3])
|
||||
assert not bipartite.is_bipartite_node_set(G, [1, 2])
|
||||
G.add_edge(10, 20)
|
||||
assert bipartite.is_bipartite_node_set(G, [0, 2, 10])
|
||||
assert bipartite.is_bipartite_node_set(G, [0, 2, 20])
|
||||
assert bipartite.is_bipartite_node_set(G, [1, 3, 10])
|
||||
assert bipartite.is_bipartite_node_set(G, [1, 3, 20])
|
||||
|
||||
def test_bipartite_density(self):
|
||||
G = nx.path_graph(5)
|
||||
X, Y = bipartite.sets(G)
|
||||
density = float(len(list(G.edges()))) / (len(X) * len(Y))
|
||||
assert bipartite.density(G, X) == density
|
||||
D = nx.DiGraph(G.edges())
|
||||
assert bipartite.density(D, X) == density / 2.0
|
||||
assert bipartite.density(nx.Graph(), {}) == 0.0
|
||||
|
||||
def test_bipartite_degrees(self):
|
||||
G = nx.path_graph(5)
|
||||
X = {1, 3}
|
||||
Y = {0, 2, 4}
|
||||
u, d = bipartite.degrees(G, Y)
|
||||
assert dict(u) == {1: 2, 3: 2}
|
||||
assert dict(d) == {0: 1, 2: 2, 4: 1}
|
||||
|
||||
def test_bipartite_weighted_degrees(self):
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=0.1, other=0.2)
|
||||
X = {1, 3}
|
||||
Y = {0, 2, 4}
|
||||
u, d = bipartite.degrees(G, Y, weight="weight")
|
||||
assert dict(u) == {1: 1.1, 3: 2}
|
||||
assert dict(d) == {0: 0.1, 2: 2, 4: 1}
|
||||
u, d = bipartite.degrees(G, Y, weight="other")
|
||||
assert dict(u) == {1: 1.2, 3: 2}
|
||||
assert dict(d) == {0: 0.2, 2: 2, 4: 1}
|
||||
|
||||
def test_biadjacency_matrix_weight(self):
|
||||
scipy = pytest.importorskip("scipy")
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=2, other=4)
|
||||
X = [1, 3]
|
||||
Y = [0, 2, 4]
|
||||
M = bipartite.biadjacency_matrix(G, X, weight="weight")
|
||||
assert M[0, 0] == 2
|
||||
M = bipartite.biadjacency_matrix(G, X, weight="other")
|
||||
assert M[0, 0] == 4
|
||||
|
||||
def test_biadjacency_matrix(self):
|
||||
scipy = pytest.importorskip("scipy")
|
||||
tops = [2, 5, 10]
|
||||
bots = [5, 10, 15]
|
||||
for i in range(len(tops)):
|
||||
G = bipartite.random_graph(tops[i], bots[i], 0.2)
|
||||
top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
|
||||
M = bipartite.biadjacency_matrix(G, top)
|
||||
assert M.shape[0] == tops[i]
|
||||
assert M.shape[1] == bots[i]
|
||||
|
||||
def test_biadjacency_matrix_order(self):
|
||||
scipy = pytest.importorskip("scipy")
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=2)
|
||||
X = [3, 1]
|
||||
Y = [4, 2, 0]
|
||||
M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
|
||||
assert M[1, 2] == 2
|
|
@ -0,0 +1,175 @@
|
|||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.testing import almost_equal
|
||||
|
||||
|
||||
class TestBipartiteCentrality:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.P4 = nx.path_graph(4)
|
||||
cls.K3 = nx.complete_bipartite_graph(3, 3)
|
||||
cls.C4 = nx.cycle_graph(4)
|
||||
cls.davis = nx.davis_southern_women_graph()
|
||||
cls.top_nodes = [
|
||||
n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0
|
||||
]
|
||||
|
||||
def test_degree_centrality(self):
|
||||
d = bipartite.degree_centrality(self.P4, [1, 3])
|
||||
answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
|
||||
assert d == answer
|
||||
d = bipartite.degree_centrality(self.K3, [0, 1, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
|
||||
assert d == answer
|
||||
d = bipartite.degree_centrality(self.C4, [0, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
|
||||
assert d == answer
|
||||
|
||||
def test_betweenness_centrality(self):
|
||||
c = bipartite.betweenness_centrality(self.P4, [1, 3])
|
||||
answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}
|
||||
assert c == answer
|
||||
c = bipartite.betweenness_centrality(self.K3, [0, 1, 2])
|
||||
answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125}
|
||||
assert c == answer
|
||||
c = bipartite.betweenness_centrality(self.C4, [0, 2])
|
||||
answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
|
||||
assert c == answer
|
||||
|
||||
def test_closeness_centrality(self):
|
||||
c = bipartite.closeness_centrality(self.P4, [1, 3])
|
||||
answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}
|
||||
assert c == answer
|
||||
c = bipartite.closeness_centrality(self.K3, [0, 1, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
|
||||
assert c == answer
|
||||
c = bipartite.closeness_centrality(self.C4, [0, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
|
||||
assert c == answer
|
||||
G = nx.Graph()
|
||||
G.add_node(0)
|
||||
G.add_node(1)
|
||||
c = bipartite.closeness_centrality(G, [0])
|
||||
assert c == {1: 0.0}
|
||||
c = bipartite.closeness_centrality(G, [1])
|
||||
assert c == {1: 0.0}
|
||||
|
||||
def test_davis_degree_centrality(self):
|
||||
G = self.davis
|
||||
deg = bipartite.degree_centrality(G, self.top_nodes)
|
||||
answer = {
|
||||
"E8": 0.78,
|
||||
"E9": 0.67,
|
||||
"E7": 0.56,
|
||||
"Nora Fayette": 0.57,
|
||||
"Evelyn Jefferson": 0.57,
|
||||
"Theresa Anderson": 0.57,
|
||||
"E6": 0.44,
|
||||
"Sylvia Avondale": 0.50,
|
||||
"Laura Mandeville": 0.50,
|
||||
"Brenda Rogers": 0.50,
|
||||
"Katherina Rogers": 0.43,
|
||||
"E5": 0.44,
|
||||
"Helen Lloyd": 0.36,
|
||||
"E3": 0.33,
|
||||
"Ruth DeSand": 0.29,
|
||||
"Verne Sanderson": 0.29,
|
||||
"E12": 0.33,
|
||||
"Myra Liddel": 0.29,
|
||||
"E11": 0.22,
|
||||
"Eleanor Nye": 0.29,
|
||||
"Frances Anderson": 0.29,
|
||||
"Pearl Oglethorpe": 0.21,
|
||||
"E4": 0.22,
|
||||
"Charlotte McDowd": 0.29,
|
||||
"E10": 0.28,
|
||||
"Olivia Carleton": 0.14,
|
||||
"Flora Price": 0.14,
|
||||
"E2": 0.17,
|
||||
"E1": 0.17,
|
||||
"Dorothy Murchison": 0.14,
|
||||
"E13": 0.17,
|
||||
"E14": 0.17,
|
||||
}
|
||||
for node, value in answer.items():
|
||||
assert almost_equal(value, deg[node], places=2)
|
||||
|
||||
def test_davis_betweenness_centrality(self):
|
||||
G = self.davis
|
||||
bet = bipartite.betweenness_centrality(G, self.top_nodes)
|
||||
answer = {
|
||||
"E8": 0.24,
|
||||
"E9": 0.23,
|
||||
"E7": 0.13,
|
||||
"Nora Fayette": 0.11,
|
||||
"Evelyn Jefferson": 0.10,
|
||||
"Theresa Anderson": 0.09,
|
||||
"E6": 0.07,
|
||||
"Sylvia Avondale": 0.07,
|
||||
"Laura Mandeville": 0.05,
|
||||
"Brenda Rogers": 0.05,
|
||||
"Katherina Rogers": 0.05,
|
||||
"E5": 0.04,
|
||||
"Helen Lloyd": 0.04,
|
||||
"E3": 0.02,
|
||||
"Ruth DeSand": 0.02,
|
||||
"Verne Sanderson": 0.02,
|
||||
"E12": 0.02,
|
||||
"Myra Liddel": 0.02,
|
||||
"E11": 0.02,
|
||||
"Eleanor Nye": 0.01,
|
||||
"Frances Anderson": 0.01,
|
||||
"Pearl Oglethorpe": 0.01,
|
||||
"E4": 0.01,
|
||||
"Charlotte McDowd": 0.01,
|
||||
"E10": 0.01,
|
||||
"Olivia Carleton": 0.01,
|
||||
"Flora Price": 0.01,
|
||||
"E2": 0.00,
|
||||
"E1": 0.00,
|
||||
"Dorothy Murchison": 0.00,
|
||||
"E13": 0.00,
|
||||
"E14": 0.00,
|
||||
}
|
||||
for node, value in answer.items():
|
||||
assert almost_equal(value, bet[node], places=2)
|
||||
|
||||
def test_davis_closeness_centrality(self):
|
||||
G = self.davis
|
||||
clos = bipartite.closeness_centrality(G, self.top_nodes)
|
||||
answer = {
|
||||
"E8": 0.85,
|
||||
"E9": 0.79,
|
||||
"E7": 0.73,
|
||||
"Nora Fayette": 0.80,
|
||||
"Evelyn Jefferson": 0.80,
|
||||
"Theresa Anderson": 0.80,
|
||||
"E6": 0.69,
|
||||
"Sylvia Avondale": 0.77,
|
||||
"Laura Mandeville": 0.73,
|
||||
"Brenda Rogers": 0.73,
|
||||
"Katherina Rogers": 0.73,
|
||||
"E5": 0.59,
|
||||
"Helen Lloyd": 0.73,
|
||||
"E3": 0.56,
|
||||
"Ruth DeSand": 0.71,
|
||||
"Verne Sanderson": 0.71,
|
||||
"E12": 0.56,
|
||||
"Myra Liddel": 0.69,
|
||||
"E11": 0.54,
|
||||
"Eleanor Nye": 0.67,
|
||||
"Frances Anderson": 0.67,
|
||||
"Pearl Oglethorpe": 0.67,
|
||||
"E4": 0.54,
|
||||
"Charlotte McDowd": 0.60,
|
||||
"E10": 0.55,
|
||||
"Olivia Carleton": 0.59,
|
||||
"Flora Price": 0.59,
|
||||
"E2": 0.52,
|
||||
"E1": 0.52,
|
||||
"Dorothy Murchison": 0.65,
|
||||
"E13": 0.52,
|
||||
"E14": 0.52,
|
||||
}
|
||||
for node, value in answer.items():
|
||||
assert almost_equal(value, clos[node], places=2)
|
|
@ -0,0 +1,83 @@
import networkx as nx
import pytest
from networkx.algorithms.bipartite.cluster import cc_dot, cc_min, cc_max
import networkx.algorithms.bipartite as bipartite


def test_pairwise_bipartite_cc_functions():
    # Test functions for different kinds of bipartite clustering coefficients
    # between pairs of nodes using 3 example graphs from figure 5 p. 40
    # Latapy et al (2008)
    G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)])
    G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)])
    G3 = nx.Graph(
        [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]
    )
    result = {
        0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
        1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
        2: [2 / 8.0, 2 / 5.0, 2 / 5.0],
    }
    for i, G in enumerate([G1, G2, G3]):
        assert bipartite.is_bipartite(G)
        assert cc_dot(set(G[0]), set(G[1])) == result[i][0]
        assert cc_min(set(G[0]), set(G[1])) == result[i][1]
        assert cc_max(set(G[0]), set(G[1])) == result[i][2]


def test_star_graph():
    G = nx.star_graph(3)
    # all modes are the same
    answer = {0: 0, 1: 1, 2: 1, 3: 1}
    assert bipartite.clustering(G, mode="dot") == answer
    assert bipartite.clustering(G, mode="min") == answer
    assert bipartite.clustering(G, mode="max") == answer


def test_not_bipartite():
    with pytest.raises(nx.NetworkXError):
        bipartite.clustering(nx.complete_graph(4))


def test_bad_mode():
    with pytest.raises(nx.NetworkXError):
        bipartite.clustering(nx.path_graph(4), mode="foo")


def test_path_graph():
    G = nx.path_graph(4)
    answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
    assert bipartite.clustering(G, mode="dot") == answer
    assert bipartite.clustering(G, mode="max") == answer
    answer = {0: 1, 1: 1, 2: 1, 3: 1}
    assert bipartite.clustering(G, mode="min") == answer


def test_average_path_graph():
    G = nx.path_graph(4)
    assert bipartite.average_clustering(G, mode="dot") == 0.5
    assert bipartite.average_clustering(G, mode="max") == 0.5
    assert bipartite.average_clustering(G, mode="min") == 1


def test_ra_clustering_davis():
    G = nx.davis_southern_women_graph()
    cc4 = round(bipartite.robins_alexander_clustering(G), 3)
    assert cc4 == 0.468


def test_ra_clustering_square():
    G = nx.path_graph(4)
    G.add_edge(0, 3)
    assert bipartite.robins_alexander_clustering(G) == 1.0


def test_ra_clustering_zero():
    G = nx.Graph()
    assert bipartite.robins_alexander_clustering(G) == 0
    G.add_nodes_from(range(4))
    assert bipartite.robins_alexander_clustering(G) == 0
    G.add_edges_from([(0, 1), (2, 3), (3, 4)])
    assert bipartite.robins_alexander_clustering(G) == 0
    G.add_edge(1, 2)
    assert bipartite.robins_alexander_clustering(G) == 0
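The pairwise helpers exercised above compare the neighbor sets of two nodes from the same side of the bipartition. A short sketch of the three definitions they are expected to implement, reproducing the values checked for `G1` in the first test (the variable names are illustrative):

# Neighborhoods of nodes 0 and 1 in G1 from the test above.
Nu = {2, 3, 4, 5, 6}
Nv = {5, 6, 7}
common = len(Nu & Nv)
cc_dot_val = common / len(Nu | Nv)  # 2 / 6 == 1 / 3
cc_min_val = common / min(len(Nu), len(Nv))  # 2 / 3
cc_max_val = common / max(len(Nu), len(Nv))  # 2 / 5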
@ -0,0 +1,33 @@
import networkx as nx
import networkx.algorithms.bipartite as bipartite


class TestMinEdgeCover:
    """Tests for :func:`networkx.algorithms.bipartite.min_edge_cover`"""

    def test_empty_graph(self):
        G = nx.Graph()
        assert bipartite.min_edge_cover(G) == set()

    def test_graph_single_edge(self):
        G = nx.Graph()
        G.add_edge(0, 1)
        assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)}

    def test_bipartite_default(self):
        G = nx.Graph()
        G.add_nodes_from([1, 2, 3, 4], bipartite=0)
        G.add_nodes_from(["a", "b", "c"], bipartite=1)
        G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
        min_cover = bipartite.min_edge_cover(G)
        assert nx.is_edge_cover(G, min_cover)
        assert len(min_cover) == 8

    def test_bipartite_explicit(self):
        G = nx.Graph()
        G.add_nodes_from([1, 2, 3, 4], bipartite=0)
        G.add_nodes_from(["a", "b", "c"], bipartite=1)
        G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
        min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching)
        assert nx.is_edge_cover(G, min_cover)
        assert len(min_cover) == 8
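The expected length of 8 in both tests follows from the cover being returned with each edge in both orientations: the 7-node example needs 4 undirected edges (the number of nodes minus the size of a maximum matching, which is 3), hence 8 ordered tuples. A standalone sketch of the same check, outside the test class:

import networkx as nx
from networkx.algorithms import bipartite

G = nx.Graph([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
cover = bipartite.min_edge_cover(G)
assert nx.is_edge_cover(G, cover)
# Both (u, v) and (v, u) are included, so 4 undirected edges appear as 8 tuples.
assert len({frozenset(e) for e in cover}) == 4 and len(cover) == 8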
@ -0,0 +1,191 @@
|
|||
"""
|
||||
Unit tests for bipartite edgelists.
|
||||
"""
|
||||
import pytest
|
||||
import io
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
import networkx as nx
|
||||
from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal
|
||||
from networkx.algorithms import bipartite
|
||||
|
||||
|
||||
class TestEdgelist:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.G = nx.Graph(name="test")
|
||||
e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")]
|
||||
cls.G.add_edges_from(e)
|
||||
cls.G.add_nodes_from(["a", "c", "e"], bipartite=0)
|
||||
cls.G.add_nodes_from(["b", "d", "f"], bipartite=1)
|
||||
cls.G.add_node("g", bipartite=0)
|
||||
cls.DG = nx.DiGraph(cls.G)
|
||||
cls.MG = nx.MultiGraph()
|
||||
cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)])
|
||||
cls.MG.add_node(1, bipartite=0)
|
||||
cls.MG.add_node(2, bipartite=1)
|
||||
|
||||
def test_read_edgelist_1(self):
|
||||
s = b"""\
|
||||
# comment line
|
||||
1 2
|
||||
# comment line
|
||||
2 3
|
||||
"""
|
||||
bytesIO = io.BytesIO(s)
|
||||
G = bipartite.read_edgelist(bytesIO, nodetype=int)
|
||||
assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
|
||||
|
||||
def test_read_edgelist_3(self):
|
||||
s = b"""\
|
||||
# comment line
|
||||
1 2 {'weight':2.0}
|
||||
# comment line
|
||||
2 3 {'weight':3.0}
|
||||
"""
|
||||
bytesIO = io.BytesIO(s)
|
||||
G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False)
|
||||
assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
|
||||
|
||||
bytesIO = io.BytesIO(s)
|
||||
G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True)
|
||||
assert_edges_equal(
|
||||
G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})]
|
||||
)
|
||||
|
||||
def test_write_edgelist_1(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edges_from([(1, 2), (2, 3)])
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=False)
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2\n3 2\n"
|
||||
|
||||
def test_write_edgelist_2(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edges_from([(1, 2), (2, 3)])
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=True)
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2 {}\n3 2 {}\n"
|
||||
|
||||
def test_write_edgelist_3(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edge(1, 2, weight=2.0)
|
||||
G.add_edge(2, 3, weight=3.0)
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=True)
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n"
|
||||
|
||||
def test_write_edgelist_4(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edge(1, 2, weight=2.0)
|
||||
G.add_edge(2, 3, weight=3.0)
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=[("weight")])
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2 2.0\n3 2 3.0\n"
|
||||
|
||||
def test_unicode(self):
|
||||
G = nx.Graph()
|
||||
name1 = chr(2344) + chr(123) + chr(6543)
|
||||
name2 = chr(5543) + chr(1543) + chr(324)
|
||||
G.add_edge(name1, "Radiohead", **{name2: 3})
|
||||
G.add_node(name1, bipartite=0)
|
||||
G.add_node("Radiohead", bipartite=1)
|
||||
fd, fname = tempfile.mkstemp()
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname)
|
||||
assert_graphs_equal(G, H)
|
||||
os.close(fd)
|
||||
os.unlink(fname)
|
||||
|
||||
def test_latin1_issue(self):
|
||||
G = nx.Graph()
|
||||
name1 = chr(2344) + chr(123) + chr(6543)
|
||||
name2 = chr(5543) + chr(1543) + chr(324)
|
||||
G.add_edge(name1, "Radiohead", **{name2: 3})
|
||||
G.add_node(name1, bipartite=0)
|
||||
G.add_node("Radiohead", bipartite=1)
|
||||
fd, fname = tempfile.mkstemp()
|
||||
pytest.raises(
|
||||
UnicodeEncodeError, bipartite.write_edgelist, G, fname, encoding="latin-1"
|
||||
)
|
||||
os.close(fd)
|
||||
os.unlink(fname)
|
||||
|
||||
def test_latin1(self):
|
||||
G = nx.Graph()
|
||||
name1 = "Bj" + chr(246) + "rk"
|
||||
name2 = chr(220) + "ber"
|
||||
G.add_edge(name1, "Radiohead", **{name2: 3})
|
||||
G.add_node(name1, bipartite=0)
|
||||
G.add_node("Radiohead", bipartite=1)
|
||||
fd, fname = tempfile.mkstemp()
|
||||
bipartite.write_edgelist(G, fname, encoding="latin-1")
|
||||
H = bipartite.read_edgelist(fname, encoding="latin-1")
|
||||
assert_graphs_equal(G, H)
|
||||
os.close(fd)
|
||||
os.unlink(fname)
|
||||
|
||||
def test_edgelist_graph(self):
|
||||
G = self.G
|
||||
(fd, fname) = tempfile.mkstemp()
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname)
|
||||
H2 = bipartite.read_edgelist(fname)
|
||||
assert H != H2 # they should be different graphs
|
||||
G.remove_node("g") # isolated nodes are not written in edgelist
|
||||
assert_nodes_equal(list(H), list(G))
|
||||
assert_edges_equal(list(H.edges()), list(G.edges()))
|
||||
os.close(fd)
|
||||
os.unlink(fname)
|
||||
|
||||
def test_edgelist_integers(self):
|
||||
G = nx.convert_node_labels_to_integers(self.G)
|
||||
(fd, fname) = tempfile.mkstemp()
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname, nodetype=int)
|
||||
# isolated nodes are not written in edgelist
|
||||
G.remove_nodes_from(list(nx.isolates(G)))
|
||||
assert_nodes_equal(list(H), list(G))
|
||||
assert_edges_equal(list(H.edges()), list(G.edges()))
|
||||
os.close(fd)
|
||||
os.unlink(fname)
|
||||
|
||||
def test_edgelist_multigraph(self):
|
||||
G = self.MG
|
||||
(fd, fname) = tempfile.mkstemp()
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
|
||||
H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
|
||||
assert H != H2 # they should be different graphs
|
||||
assert_nodes_equal(list(H), list(G))
|
||||
assert_edges_equal(list(H.edges()), list(G.edges()))
|
||||
os.close(fd)
|
||||
os.unlink(fname)
|
||||
|
||||
def test_empty_digraph(self):
|
||||
with pytest.raises(nx.NetworkXNotImplemented):
|
||||
bytesIO = io.BytesIO()
|
||||
bipartite.write_edgelist(nx.DiGraph(), bytesIO)
|
||||
|
||||
def test_raise_attribute(self):
|
||||
with pytest.raises(AttributeError):
|
||||
G = nx.path_graph(4)
|
||||
bytesIO = io.BytesIO()
|
||||
bipartite.write_edgelist(G, bytesIO)
|
|
@ -0,0 +1,399 @@
|
|||
import pytest
|
||||
import networkx as nx
|
||||
from ..generators import (
|
||||
alternating_havel_hakimi_graph,
|
||||
complete_bipartite_graph,
|
||||
configuration_model,
|
||||
gnmk_random_graph,
|
||||
havel_hakimi_graph,
|
||||
preferential_attachment_graph,
|
||||
random_graph,
|
||||
reverse_havel_hakimi_graph,
|
||||
)
|
||||
|
||||
"""
|
||||
Generators - Bipartite
|
||||
----------------------
|
||||
"""
|
||||
|
||||
|
||||
class TestGeneratorsBipartite:
|
||||
def test_complete_bipartite_graph(self):
|
||||
G = complete_bipartite_graph(0, 0)
|
||||
assert nx.is_isomorphic(G, nx.null_graph())
|
||||
|
||||
for i in [1, 5]:
|
||||
G = complete_bipartite_graph(i, 0)
|
||||
assert nx.is_isomorphic(G, nx.empty_graph(i))
|
||||
G = complete_bipartite_graph(0, i)
|
||||
assert nx.is_isomorphic(G, nx.empty_graph(i))
|
||||
|
||||
G = complete_bipartite_graph(2, 2)
|
||||
assert nx.is_isomorphic(G, nx.cycle_graph(4))
|
||||
|
||||
G = complete_bipartite_graph(1, 5)
|
||||
assert nx.is_isomorphic(G, nx.star_graph(5))
|
||||
|
||||
G = complete_bipartite_graph(5, 1)
|
||||
assert nx.is_isomorphic(G, nx.star_graph(5))
|
||||
|
||||
# complete_bipartite_graph(m1,m2) is a connected graph with
|
||||
# m1+m2 nodes and m1*m2 edges
|
||||
for m1, m2 in [(5, 11), (7, 3)]:
|
||||
G = complete_bipartite_graph(m1, m2)
|
||||
assert nx.number_of_nodes(G) == m1 + m2
|
||||
assert nx.number_of_edges(G) == m1 * m2
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError, complete_bipartite_graph, 7, 3, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError, complete_bipartite_graph, 7, 3, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
complete_bipartite_graph,
|
||||
7,
|
||||
3,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
|
||||
assert mG.is_multigraph()
|
||||
assert sorted(mG.edges()) == sorted(G.edges())
|
||||
|
||||
mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
|
||||
assert mG.is_multigraph()
|
||||
assert sorted(mG.edges()) == sorted(G.edges())
|
||||
|
||||
mG = complete_bipartite_graph(7, 3) # default to Graph
|
||||
assert sorted(mG.edges()) == sorted(G.edges())
|
||||
assert not mG.is_multigraph()
|
||||
assert not mG.is_directed()
|
||||
|
||||
# specify nodes rather than number of nodes
|
||||
G = complete_bipartite_graph([1, 2], ["a", "b"])
|
||||
has_edges = (
|
||||
G.has_edge(1, "a")
|
||||
& G.has_edge(1, "b")
|
||||
& G.has_edge(2, "a")
|
||||
& G.has_edge(2, "b")
|
||||
)
|
||||
assert has_edges
|
||||
assert G.size() == 4
|
||||
|
||||
def test_configuration_model(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq)
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 1, 1, 1]
|
||||
bseq = [3, 3, 3]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
|
||||
|
||||
GU = nx.project(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 3
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph()
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
configuration_model,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_havel_hakimi_graph(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq)
|
||||
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
GU = nx.project(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 4
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_reverse_havel_hakimi_graph(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq)
|
||||
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 1, 1, 1]
|
||||
bseq = [3, 3, 3]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
|
||||
|
||||
GU = nx.project(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 3
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
reverse_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
reverse_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
reverse_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_alternating_havel_hakimi_graph(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq)
|
||||
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 1, 1, 1]
|
||||
bseq = [3, 3, 3]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
|
||||
|
||||
GU = nx.project(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 3
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
alternating_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
alternating_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
alternating_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_preferential_attachment(self):
|
||||
aseq = [3, 2, 1, 1]
|
||||
G = preferential_attachment_graph(aseq, 0.5)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
preferential_attachment_graph,
|
||||
aseq,
|
||||
0.5,
|
||||
create_using=nx.DiGraph(),
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
preferential_attachment_graph,
|
||||
aseq,
|
||||
0.5,
|
||||
create_using=nx.DiGraph(),
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
preferential_attachment_graph,
|
||||
aseq,
|
||||
0.5,
|
||||
create_using=nx.DiGraph(),
|
||||
)
|
||||
|
||||
def test_random_graph(self):
|
||||
n = 10
|
||||
m = 20
|
||||
G = random_graph(n, m, 0.9)
|
||||
assert len(G) == 30
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
|
||||
def test_random_digraph(self):
|
||||
n = 10
|
||||
m = 20
|
||||
G = random_graph(n, m, 0.9, directed=True)
|
||||
assert len(G) == 30
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
|
||||
def test_gnmk_random_graph(self):
|
||||
n = 10
|
||||
m = 20
|
||||
edges = 100
|
||||
# set seed because sometimes it is not connected
|
||||
# which raises an error in bipartite.sets(G) below.
|
||||
G = gnmk_random_graph(n, m, edges, seed=1234)
|
||||
assert len(G) == n + m
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
# print(X)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
assert edges == len(list(G.edges()))
|
||||
|
||||
def test_gnmk_random_graph_complete(self):
|
||||
n = 10
|
||||
m = 20
|
||||
edges = 200
|
||||
G = gnmk_random_graph(n, m, edges)
|
||||
assert len(G) == n + m
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
# print(X)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
assert edges == len(list(G.edges()))
|
|
@ -0,0 +1,316 @@
|
|||
"""Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module."""
|
||||
import itertools
|
||||
|
||||
import networkx as nx
|
||||
|
||||
import pytest
|
||||
|
||||
from networkx.algorithms.bipartite.matching import eppstein_matching
|
||||
from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
|
||||
from networkx.algorithms.bipartite.matching import maximum_matching
|
||||
from networkx.algorithms.bipartite.matching import minimum_weight_full_matching
|
||||
from networkx.algorithms.bipartite.matching import to_vertex_cover
|
||||
|
||||
|
||||
class TestMatching:
|
||||
"""Tests for bipartite matching algorithms."""
|
||||
|
||||
def setup(self):
|
||||
"""Creates a bipartite graph for use in testing matching algorithms.
|
||||
|
||||
The bipartite graph has a maximum cardinality matching that leaves
|
||||
vertex 1 and vertex 10 unmatched. The first six numbers are the left
|
||||
vertices and the next six numbers are the right vertices.
|
||||
|
||||
"""
|
||||
self.simple_graph = nx.complete_bipartite_graph(2, 3)
|
||||
self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1}
|
||||
|
||||
edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)]
|
||||
self.top_nodes = set(range(6))
|
||||
self.graph = nx.Graph()
|
||||
self.graph.add_nodes_from(range(12))
|
||||
self.graph.add_edges_from(edges)
|
||||
|
||||
# Example bipartite graph from issue 2127
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from(
|
||||
[
|
||||
(1, "C"),
|
||||
(1, "B"),
|
||||
(0, "G"),
|
||||
(1, "F"),
|
||||
(1, "E"),
|
||||
(0, "C"),
|
||||
(1, "D"),
|
||||
(1, "I"),
|
||||
(0, "A"),
|
||||
(0, "D"),
|
||||
(0, "F"),
|
||||
(0, "E"),
|
||||
(0, "H"),
|
||||
(1, "G"),
|
||||
(1, "A"),
|
||||
(0, "I"),
|
||||
(0, "B"),
|
||||
(1, "H"),
|
||||
]
|
||||
)
|
||||
G.add_edge((1, "C"), (0, "A"))
|
||||
G.add_edge((1, "B"), (0, "A"))
|
||||
G.add_edge((0, "G"), (1, "I"))
|
||||
G.add_edge((0, "G"), (1, "H"))
|
||||
G.add_edge((1, "F"), (0, "A"))
|
||||
G.add_edge((1, "F"), (0, "C"))
|
||||
G.add_edge((1, "F"), (0, "E"))
|
||||
G.add_edge((1, "E"), (0, "A"))
|
||||
G.add_edge((1, "E"), (0, "C"))
|
||||
G.add_edge((0, "C"), (1, "D"))
|
||||
G.add_edge((0, "C"), (1, "I"))
|
||||
G.add_edge((0, "C"), (1, "G"))
|
||||
G.add_edge((0, "C"), (1, "H"))
|
||||
G.add_edge((1, "D"), (0, "A"))
|
||||
G.add_edge((1, "I"), (0, "A"))
|
||||
G.add_edge((1, "I"), (0, "E"))
|
||||
G.add_edge((0, "A"), (1, "G"))
|
||||
G.add_edge((0, "A"), (1, "H"))
|
||||
G.add_edge((0, "E"), (1, "G"))
|
||||
G.add_edge((0, "E"), (1, "H"))
|
||||
self.disconnected_graph = G
|
||||
|
||||
def check_match(self, matching):
|
||||
"""Asserts that the matching is what we expect from the bipartite graph
|
||||
constructed in the :meth:`setup` fixture.
|
||||
|
||||
"""
|
||||
# For the sake of brevity, rename `matching` to `M`.
|
||||
M = matching
|
||||
matched_vertices = frozenset(itertools.chain(*M.items()))
|
||||
# Assert that the maximum number of vertices (10) is matched.
|
||||
assert matched_vertices == frozenset(range(12)) - {1, 10}
|
||||
# Assert that no vertex appears in two edges, or in other words, that
|
||||
# the matching (u, v) and (v, u) both appear in the matching
|
||||
# dictionary.
|
||||
assert all(u == M[M[u]] for u in range(12) if u in M)
|
||||
|
||||
def check_vertex_cover(self, vertices):
|
||||
"""Asserts that the given set of vertices is the vertex cover we
|
||||
expected from the bipartite graph constructed in the :meth:`setup`
|
||||
fixture.
|
||||
|
||||
"""
|
||||
# By Konig's theorem, the number of edges in a maximum matching equals
|
||||
# the number of vertices in a minimum vertex cover.
|
||||
assert len(vertices) == 5
|
||||
# Assert that the set is truly a vertex cover.
|
||||
for (u, v) in self.graph.edges():
|
||||
assert u in vertices or v in vertices
|
||||
# TODO Assert that the vertices are the correct ones.
|
||||
|
||||
def test_eppstein_matching(self):
|
||||
"""Tests that David Eppstein's implementation of the Hopcroft--Karp
|
||||
algorithm produces a maximum cardinality matching.
|
||||
|
||||
"""
|
||||
self.check_match(eppstein_matching(self.graph, self.top_nodes))
|
||||
|
||||
def test_hopcroft_karp_matching(self):
|
||||
"""Tests that the Hopcroft--Karp algorithm produces a maximum
|
||||
cardinality matching in a bipartite graph.
|
||||
|
||||
"""
|
||||
self.check_match(hopcroft_karp_matching(self.graph, self.top_nodes))
|
||||
|
||||
def test_to_vertex_cover(self):
|
||||
"""Test for converting a maximum matching to a minimum vertex cover."""
|
||||
matching = maximum_matching(self.graph, self.top_nodes)
|
||||
vertex_cover = to_vertex_cover(self.graph, matching, self.top_nodes)
|
||||
self.check_vertex_cover(vertex_cover)
|
||||
|
||||
def test_eppstein_matching_simple(self):
|
||||
match = eppstein_matching(self.simple_graph)
|
||||
assert match == self.simple_solution
|
||||
|
||||
def test_hopcroft_karp_matching_simple(self):
|
||||
match = hopcroft_karp_matching(self.simple_graph)
|
||||
assert match == self.simple_solution
|
||||
|
||||
def test_eppstein_matching_disconnected(self):
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
match = eppstein_matching(self.disconnected_graph)
|
||||
|
||||
def test_hopcroft_karp_matching_disconnected(self):
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
match = hopcroft_karp_matching(self.disconnected_graph)
|
||||
|
||||
def test_issue_2127(self):
|
||||
"""Test from issue 2127"""
|
||||
# Build the example DAG
|
||||
G = nx.DiGraph()
|
||||
G.add_edge("A", "C")
|
||||
G.add_edge("A", "B")
|
||||
G.add_edge("C", "E")
|
||||
G.add_edge("C", "D")
|
||||
G.add_edge("E", "G")
|
||||
G.add_edge("E", "F")
|
||||
G.add_edge("G", "I")
|
||||
G.add_edge("G", "H")
|
||||
|
||||
tc = nx.transitive_closure(G)
|
||||
btc = nx.Graph()
|
||||
|
||||
# Create a bipartite graph based on the transitive closure of G
|
||||
for v in tc.nodes():
|
||||
btc.add_node((0, v))
|
||||
btc.add_node((1, v))
|
||||
|
||||
for u, v in tc.edges():
|
||||
btc.add_edge((0, u), (1, v))
|
||||
|
||||
top_nodes = {n for n in btc if n[0] == 0}
|
||||
matching = hopcroft_karp_matching(btc, top_nodes)
|
||||
vertex_cover = to_vertex_cover(btc, matching, top_nodes)
|
||||
independent_set = set(G) - {v for _, v in vertex_cover}
|
||||
assert {"B", "D", "F", "I", "H"} == independent_set
|
||||
|
||||
def test_vertex_cover_issue_2384(self):
|
||||
G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
|
||||
matching = maximum_matching(G)
|
||||
vertex_cover = to_vertex_cover(G, matching)
|
||||
for u, v in G.edges():
|
||||
assert u in vertex_cover or v in vertex_cover
|
||||
|
||||
def test_unorderable_nodes(self):
|
||||
a = object()
|
||||
b = object()
|
||||
c = object()
|
||||
d = object()
|
||||
e = object()
|
||||
G = nx.Graph([(a, d), (b, d), (b, e), (c, d)])
|
||||
matching = maximum_matching(G)
|
||||
vertex_cover = to_vertex_cover(G, matching)
|
||||
for u, v in G.edges():
|
||||
assert u in vertex_cover or v in vertex_cover
|
||||
|
||||
|
||||
def test_eppstein_matching():
|
||||
"""Test in accordance to issue #1927"""
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from(["a", 2, 3, 4], bipartite=0)
|
||||
G.add_nodes_from([1, "b", "c"], bipartite=1)
|
||||
G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)])
|
||||
matching = eppstein_matching(G)
|
||||
assert len(matching) == len(maximum_matching(G))
|
||||
assert all(x in set(matching.keys()) for x in set(matching.values()))
|
||||
|
||||
|
||||
class TestMinimumWeightFullMatching:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
global scipy
|
||||
scipy = pytest.importorskip("scipy")
|
||||
|
||||
def test_minimum_weight_full_matching_incomplete_graph(self):
|
||||
B = nx.Graph()
|
||||
B.add_nodes_from([1, 2], bipartite=0)
|
||||
B.add_nodes_from([3, 4], bipartite=1)
|
||||
B.add_edge(1, 4, weight=100)
|
||||
B.add_edge(2, 3, weight=100)
|
||||
B.add_edge(2, 4, weight=50)
|
||||
matching = minimum_weight_full_matching(B)
|
||||
assert matching == {1: 4, 2: 3, 4: 1, 3: 2}
|
||||
|
||||
def test_minimum_weight_full_matching_with_no_full_matching(self):
|
||||
B = nx.Graph()
|
||||
B.add_nodes_from([1, 2, 3], bipartite=0)
|
||||
B.add_nodes_from([4, 5, 6], bipartite=1)
|
||||
B.add_edge(1, 4, weight=100)
|
||||
B.add_edge(2, 4, weight=100)
|
||||
B.add_edge(3, 4, weight=50)
|
||||
B.add_edge(3, 5, weight=50)
|
||||
B.add_edge(3, 6, weight=50)
|
||||
with pytest.raises(ValueError):
|
||||
minimum_weight_full_matching(B)
|
||||
|
||||
def test_minimum_weight_full_matching_square(self):
|
||||
G = nx.complete_bipartite_graph(3, 3)
|
||||
G.add_edge(0, 3, weight=400)
|
||||
G.add_edge(0, 4, weight=150)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(1, 3, weight=400)
|
||||
G.add_edge(1, 4, weight=450)
|
||||
G.add_edge(1, 5, weight=600)
|
||||
G.add_edge(2, 3, weight=300)
|
||||
G.add_edge(2, 4, weight=225)
|
||||
G.add_edge(2, 5, weight=300)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2}
|
||||
|
||||
def test_minimum_weight_full_matching_smaller_left(self):
|
||||
G = nx.complete_bipartite_graph(3, 4)
|
||||
G.add_edge(0, 3, weight=400)
|
||||
G.add_edge(0, 4, weight=150)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(0, 6, weight=1)
|
||||
G.add_edge(1, 3, weight=400)
|
||||
G.add_edge(1, 4, weight=450)
|
||||
G.add_edge(1, 5, weight=600)
|
||||
G.add_edge(1, 6, weight=2)
|
||||
G.add_edge(2, 3, weight=300)
|
||||
G.add_edge(2, 4, weight=225)
|
||||
G.add_edge(2, 5, weight=290)
|
||||
G.add_edge(2, 6, weight=3)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
|
||||
|
||||
def test_minimum_weight_full_matching_smaller_top_nodes_right(self):
|
||||
G = nx.complete_bipartite_graph(3, 4)
|
||||
G.add_edge(0, 3, weight=400)
|
||||
G.add_edge(0, 4, weight=150)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(0, 6, weight=1)
|
||||
G.add_edge(1, 3, weight=400)
|
||||
G.add_edge(1, 4, weight=450)
|
||||
G.add_edge(1, 5, weight=600)
|
||||
G.add_edge(1, 6, weight=2)
|
||||
G.add_edge(2, 3, weight=300)
|
||||
G.add_edge(2, 4, weight=225)
|
||||
G.add_edge(2, 5, weight=290)
|
||||
G.add_edge(2, 6, weight=3)
|
||||
matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6])
|
||||
assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
|
||||
|
||||
def test_minimum_weight_full_matching_smaller_right(self):
|
||||
G = nx.complete_bipartite_graph(4, 3)
|
||||
G.add_edge(0, 4, weight=400)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(0, 6, weight=300)
|
||||
G.add_edge(1, 4, weight=150)
|
||||
G.add_edge(1, 5, weight=450)
|
||||
G.add_edge(1, 6, weight=225)
|
||||
G.add_edge(2, 4, weight=400)
|
||||
G.add_edge(2, 5, weight=600)
|
||||
G.add_edge(2, 6, weight=290)
|
||||
G.add_edge(3, 4, weight=1)
|
||||
G.add_edge(3, 5, weight=2)
|
||||
G.add_edge(3, 6, weight=3)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2}
|
||||
|
||||
def test_minimum_weight_full_matching_negative_weights(self):
|
||||
G = nx.complete_bipartite_graph(2, 2)
|
||||
G.add_edge(0, 2, weight=-2)
|
||||
G.add_edge(0, 3, weight=0.2)
|
||||
G.add_edge(1, 2, weight=-2)
|
||||
G.add_edge(1, 3, weight=0.3)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
|
||||
|
||||
def test_minimum_weight_full_matching_different_weight_key(self):
|
||||
G = nx.complete_bipartite_graph(2, 2)
|
||||
G.add_edge(0, 2, mass=2)
|
||||
G.add_edge(0, 3, mass=0.2)
|
||||
G.add_edge(1, 2, mass=1)
|
||||
G.add_edge(1, 3, mass=2)
|
||||
matching = minimum_weight_full_matching(G, weight="mass")
|
||||
assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
|
|
@ -0,0 +1,79 @@
import pytest

np = pytest.importorskip("numpy")
sp = pytest.importorskip("scipy")
sparse = pytest.importorskip("scipy.sparse")


import networkx as nx
from networkx.algorithms import bipartite
from networkx.testing.utils import assert_edges_equal


class TestBiadjacencyMatrix:
    def test_biadjacency_matrix_weight(self):
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2, other=4)
        X = [1, 3]
        Y = [0, 2, 4]
        M = bipartite.biadjacency_matrix(G, X, weight="weight")
        assert M[0, 0] == 2
        M = bipartite.biadjacency_matrix(G, X, weight="other")
        assert M[0, 0] == 4

    def test_biadjacency_matrix(self):
        tops = [2, 5, 10]
        bots = [5, 10, 15]
        for i in range(len(tops)):
            G = bipartite.random_graph(tops[i], bots[i], 0.2)
            top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
            M = bipartite.biadjacency_matrix(G, top)
            assert M.shape[0] == tops[i]
            assert M.shape[1] == bots[i]

    def test_biadjacency_matrix_order(self):
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2)
        X = [3, 1]
        Y = [4, 2, 0]
        M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
        assert M[1, 2] == 2

    def test_null_graph(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph(), [])

    def test_empty_graph(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [])

    def test_duplicate_row(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1])

    def test_duplicate_col(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1])

    def test_format_keyword(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo")

    def test_from_biadjacency_roundtrip(self):
        B1 = nx.path_graph(5)
        M = bipartite.biadjacency_matrix(B1, [0, 2, 4])
        B2 = bipartite.from_biadjacency_matrix(M)
        assert nx.is_isomorphic(B1, B2)

    def test_from_biadjacency_weight(self):
        M = sparse.csc_matrix([[1, 2], [0, 3]])
        B = bipartite.from_biadjacency_matrix(M)
        assert_edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)])
        B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight")
        e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})]
        assert_edges_equal(B.edges(data=True), e)

    def test_from_biadjacency_multigraph(self):
        M = sparse.csc_matrix([[1, 2], [0, 3]])
        B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph())
        assert_edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)])
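A brief note on the ordering test above, as an illustrative sketch assuming SciPy is available: rows follow the given row order and columns follow the given column order, and edges that lack the requested weight key appear to default to 1.

import networkx as nx
from networkx.algorithms import bipartite

G = nx.path_graph(5)
G.add_edge(0, 1, weight=2)
row_order, column_order = [3, 1], [4, 2, 0]
M = bipartite.biadjacency_matrix(G, row_order, column_order, weight="weight")
assert M[1, 2] == 2  # node 1 is row 1 and node 0 is column 2
assert M[1, 1] == 1  # edge (1, 2) carries no "weight" attribute, so it defaults to 1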
@ -0,0 +1,392 @@
|
|||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.testing import assert_edges_equal, assert_nodes_equal
|
||||
|
||||
|
||||
class TestBipartiteProject:
|
||||
def test_path_projected_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
P = bipartite.projected_graph(G, [1, 3])
|
||||
assert_nodes_equal(list(P), [1, 3])
|
||||
assert_edges_equal(list(P.edges()), [(1, 3)])
|
||||
P = bipartite.projected_graph(G, [0, 2])
|
||||
assert_nodes_equal(list(P), [0, 2])
|
||||
assert_edges_equal(list(P.edges()), [(0, 2)])
|
||||
|
||||
def test_path_projected_properties_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
G.add_node(1, name="one")
|
||||
G.add_node(2, name="two")
|
||||
P = bipartite.projected_graph(G, [1, 3])
|
||||
assert_nodes_equal(list(P), [1, 3])
|
||||
assert_edges_equal(list(P.edges()), [(1, 3)])
|
||||
assert P.nodes[1]["name"] == G.nodes[1]["name"]
|
||||
P = bipartite.projected_graph(G, [0, 2])
|
||||
assert_nodes_equal(list(P), [0, 2])
|
||||
assert_edges_equal(list(P.edges()), [(0, 2)])
|
||||
assert P.nodes[2]["name"] == G.nodes[2]["name"]
|
||||
|
||||
def test_path_collaboration_projected_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
|
||||
assert_nodes_equal(list(P), [1, 3])
|
||||
assert_edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
|
||||
assert_nodes_equal(list(P), [0, 2])
|
||||
assert_edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_directed_path_collaboration_projected_graph(self):
|
||||
G = nx.DiGraph()
|
||||
nx.add_path(G, range(4))
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
|
||||
assert_nodes_equal(list(P), [1, 3])
|
||||
assert_edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
|
||||
assert_nodes_equal(list(P), [0, 2])
|
||||
assert_edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_path_weighted_projected_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
P = bipartite.weighted_projected_graph(G, [1, 3])
|
||||
assert_nodes_equal(list(P), [1, 3])
|
||||
assert_edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.weighted_projected_graph(G, [0, 2])
|
||||
assert_nodes_equal(list(P), [0, 2])
|
||||
assert_edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_path_weighted_projected_directed_graph(self):
|
||||
G = nx.DiGraph()
|
||||
nx.add_path(G, range(4))
|
||||
P = bipartite.weighted_projected_graph(G, [1, 3])
|
||||
assert_nodes_equal(list(P), [1, 3])
|
||||
assert_edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.weighted_projected_graph(G, [0, 2])
|
||||
assert_nodes_equal(list(P), [0, 2])
|
||||
assert_edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
    def test_star_projected_graph(self):
        G = nx.star_graph(3)
        P = bipartite.projected_graph(G, [1, 2, 3])
        assert_nodes_equal(list(P), [1, 2, 3])
        assert_edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
        P = bipartite.weighted_projected_graph(G, [1, 2, 3])
        assert_nodes_equal(list(P), [1, 2, 3])
        assert_edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])

        P = bipartite.projected_graph(G, [0])
        assert_nodes_equal(list(P), [0])
        assert_edges_equal(list(P.edges()), [])

    def test_project_multigraph(self):
        G = nx.Graph()
        G.add_edge("a", 1)
        G.add_edge("b", 1)
        G.add_edge("a", 2)
        G.add_edge("b", 2)
        P = bipartite.projected_graph(G, "ab")
        assert_edges_equal(list(P.edges()), [("a", "b")])
        P = bipartite.weighted_projected_graph(G, "ab")
        assert_edges_equal(list(P.edges()), [("a", "b")])
        P = bipartite.projected_graph(G, "ab", multigraph=True)
        assert_edges_equal(list(P.edges()), [("a", "b"), ("a", "b")])

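    # Collaboration weighting (Newman's formula): each shared bottom node k
    # contributes 1 / (deg(k) - 1). Below, "b" and "c" share nodes 2 and 4,
    # each of degree 2, so the expected weight is 1 + 1 == 2.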
    def test_project_collaboration(self):
        G = nx.Graph()
        G.add_edge("a", 1)
        G.add_edge("b", 1)
        G.add_edge("b", 2)
        G.add_edge("c", 2)
        G.add_edge("c", 3)
        G.add_edge("c", 4)
        G.add_edge("b", 4)
        P = bipartite.collaboration_weighted_projected_graph(G, "abc")
        assert P["a"]["b"]["weight"] == 1
        assert P["b"]["c"]["weight"] == 2

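    # For directed graphs the projection follows directed two-step paths:
    # "A" -> 1 -> "B" below yields the edge ("A", "B"), while the shared
    # successor 2 of "A" and "B" does not contribute (weight stays 1).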
    def test_directed_projection(self):
        G = nx.DiGraph()
        G.add_edge("A", 1)
        G.add_edge(1, "B")
        G.add_edge("A", 2)
        G.add_edge("B", 2)
        P = bipartite.projected_graph(G, "AB")
        assert_edges_equal(list(P.edges()), [("A", "B")])
        P = bipartite.weighted_projected_graph(G, "AB")
        assert_edges_equal(list(P.edges()), [("A", "B")])
        assert P["A"]["B"]["weight"] == 1

        P = bipartite.projected_graph(G, "AB", multigraph=True)
        assert_edges_equal(list(P.edges()), [("A", "B")])

        G = nx.DiGraph()
        G.add_edge("A", 1)
        G.add_edge(1, "B")
        G.add_edge("A", 2)
        G.add_edge(2, "B")
        P = bipartite.projected_graph(G, "AB")
        assert_edges_equal(list(P.edges()), [("A", "B")])
        P = bipartite.weighted_projected_graph(G, "AB")
        assert_edges_equal(list(P.edges()), [("A", "B")])
        assert P["A"]["B"]["weight"] == 2

        P = bipartite.projected_graph(G, "AB", multigraph=True)
        assert_edges_equal(list(P.edges()), [("A", "B"), ("A", "B")])


class TestBipartiteWeightedProjection:
    @classmethod
    def setup_class(cls):
        # Tore Opsahl's example
        # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/
        cls.G = nx.Graph()
        cls.G.add_edge("A", 1)
        cls.G.add_edge("A", 2)
        cls.G.add_edge("B", 1)
        cls.G.add_edge("B", 2)
        cls.G.add_edge("B", 3)
        cls.G.add_edge("B", 4)
        cls.G.add_edge("B", 5)
        cls.G.add_edge("C", 1)
        cls.G.add_edge("D", 3)
        cls.G.add_edge("E", 4)
        cls.G.add_edge("E", 5)
        cls.G.add_edge("E", 6)
        cls.G.add_edge("F", 6)
        # Graph based on figure 6 from Newman (2001)
        cls.N = nx.Graph()
        cls.N.add_edge("A", 1)
        cls.N.add_edge("A", 2)
        cls.N.add_edge("A", 3)
        cls.N.add_edge("B", 1)
        cls.N.add_edge("B", 2)
        cls.N.add_edge("B", 3)
        cls.N.add_edge("C", 1)
        cls.N.add_edge("D", 1)
        cls.N.add_edge("E", 3)

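    # weighted_projected_graph weights each projected edge by the number of
    # bottom nodes the two top nodes share, e.g. "A" and "B" share {1, 2} in
    # Opsahl's graph, giving the expected weight 2 below.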
    def test_project_weighted_shared(self):
        edges = [
            ("A", "B", 2),
            ("A", "C", 1),
            ("B", "C", 1),
            ("B", "D", 1),
            ("B", "E", 2),
            ("E", "F", 1),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.G, "ABCDEF")
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3),
            ("A", "E", 1),
            ("A", "C", 1),
            ("A", "D", 1),
            ("B", "E", 1),
            ("B", "C", 1),
            ("B", "D", 1),
            ("C", "D", 1),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.N, "ABCDE")
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

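    # Newman's collaboration weight sums 1 / (deg(k) - 1) over shared bottom
    # nodes k: in cls.N, "A" and "B" share nodes 1, 2, 3 with degrees 4, 2, 3,
    # so the expected weight is 1/3 + 1 + 1/2 = 11/6.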
    def test_project_weighted_newman(self):
        edges = [
            ("A", "B", 1.5),
            ("A", "C", 0.5),
            ("B", "C", 0.5),
            ("B", "D", 1),
            ("B", "E", 2),
            ("E", "F", 1),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF")
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 11 / 6.0),
            ("A", "E", 1 / 2.0),
            ("A", "C", 1 / 3.0),
            ("A", "D", 1 / 3.0),
            ("B", "E", 1 / 2.0),
            ("B", "C", 1 / 3.0),
            ("B", "D", 1 / 3.0),
            ("C", "D", 1 / 3.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE")
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

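    # With ratio=True the shared-neighbor count is divided by the size of the
    # bottom node set (6 for cls.G, 3 for cls.N), consistent with the expected
    # fractions below.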
    def test_project_weighted_ratio(self):
        edges = [
            ("A", "B", 2 / 6.0),
            ("A", "C", 1 / 6.0),
            ("B", "C", 1 / 6.0),
            ("B", "D", 1 / 6.0),
            ("B", "E", 2 / 6.0),
            ("E", "F", 1 / 6.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True)
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3 / 3.0),
            ("A", "E", 1 / 3.0),
            ("A", "C", 1 / 3.0),
            ("A", "D", 1 / 3.0),
            ("B", "E", 1 / 3.0),
            ("B", "C", 1 / 3.0),
            ("B", "D", 1 / 3.0),
            ("C", "D", 1 / 3.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True)
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

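    # overlap_weighted_projected_graph weights edges by the overlap
    # coefficient |N(u) ∩ N(v)| / min(|N(u)|, |N(v)|) when jaccard=False, and
    # by the Jaccard coefficient |N(u) ∩ N(v)| / |N(u) ∪ N(v)| by default,
    # e.g. "A" and "B" in cls.G give 2/2 and 2/5 respectively.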
    def test_project_weighted_overlap(self):
        edges = [
            ("A", "B", 2 / 2.0),
            ("A", "C", 1 / 1.0),
            ("B", "C", 1 / 1.0),
            ("B", "D", 1 / 1.0),
            ("B", "E", 2 / 3.0),
            ("E", "F", 1 / 1.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False)
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3 / 3.0),
            ("A", "E", 1 / 1.0),
            ("A", "C", 1 / 1.0),
            ("A", "D", 1 / 1.0),
            ("B", "E", 1 / 1.0),
            ("B", "C", 1 / 1.0),
            ("B", "D", 1 / 1.0),
            ("C", "D", 1 / 1.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False)
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

    def test_project_weighted_jaccard(self):
        edges = [
            ("A", "B", 2 / 5.0),
            ("A", "C", 1 / 2.0),
            ("B", "C", 1 / 5.0),
            ("B", "D", 1 / 5.0),
            ("B", "E", 2 / 6.0),
            ("E", "F", 1 / 3.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF")
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3 / 3.0),
            ("A", "E", 1 / 3.0),
            ("A", "C", 1 / 3.0),
            ("A", "D", 1 / 3.0),
            ("B", "E", 1 / 3.0),
            ("B", "C", 1 / 3.0),
            ("B", "D", 1 / 3.0),
            ("C", "D", 1 / 1.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE")
        assert_edges_equal(list(P.edges()), Panswer.edges())
        for u, v in P.edges():
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

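    # generic_weighted_projected_graph accepts a user-defined
    # weight_function(G, u, v); without one it falls back to counting shared
    # neighbors, as the identical expected weights below show.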
    def test_generic_weighted_projected_graph_simple(self):
        def shared(G, u, v):
            return len(set(G[u]) & set(G[v]))

        B = nx.path_graph(5)
        G = bipartite.generic_weighted_projected_graph(
            B, [0, 2, 4], weight_function=shared
        )
        assert_nodes_equal(list(G), [0, 2, 4])
        assert_edges_equal(
            list(G.edges(data=True)),
            [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
        )

        G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
        assert_nodes_equal(list(G), [0, 2, 4])
        assert_edges_equal(
            list(G.edges(data=True)),
            [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
        )
        B = nx.DiGraph()
        nx.add_path(B, range(5))
        G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
        assert_nodes_equal(list(G), [0, 2, 4])
        assert_edges_equal(
            list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})]
        )

    def test_generic_weighted_projected_graph_custom(self):
        def jaccard(G, u, v):
            unbrs = set(G[u])
            vnbrs = set(G[v])
            return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)

        def my_weight(G, u, v, weight="weight"):
            w = 0
            for nbr in set(G[u]) & set(G[v]):
                w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1)
            return w

        B = nx.bipartite.complete_bipartite_graph(2, 2)
        for i, (u, v) in enumerate(B.edges()):
            B.edges[u, v]["weight"] = i + 1
        G = bipartite.generic_weighted_projected_graph(
            B, [0, 1], weight_function=jaccard
        )
        assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})])
        G = bipartite.generic_weighted_projected_graph(
            B, [0, 1], weight_function=my_weight
        )
        assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})])
        G = bipartite.generic_weighted_projected_graph(B, [0, 1])
        assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})])

@ -0,0 +1,33 @@
"""Unit tests for the :mod:`networkx.algorithms.bipartite.redundancy` module.

"""
import pytest

from networkx import cycle_graph
from networkx import NetworkXError
from networkx.algorithms.bipartite import complete_bipartite_graph
from networkx.algorithms.bipartite import node_redundancy


def test_no_redundant_nodes():
    G = complete_bipartite_graph(2, 2)
    rc = node_redundancy(G)
    assert all(redundancy == 1 for redundancy in rc.values())


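# The redundancy of a node v is the fraction of pairs of its neighbors that
# are also joined through some other node; adding the chord (0, 3) to a
# 6-cycle leaves one unmatched pair for each endpoint of the chord, hence 2/3.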
def test_redundant_nodes():
    G = cycle_graph(6)
    edge = {0, 3}
    G.add_edge(*edge)
    redundancy = node_redundancy(G)
    for v in edge:
        assert redundancy[v] == 2 / 3
    for v in set(G) - edge:
        assert redundancy[v] == 1


def test_not_enough_neighbors():
    with pytest.raises(NetworkXError):
        G = complete_bipartite_graph(1, 2)
        node_redundancy(G)

@ -0,0 +1,85 @@
import pytest

import networkx as nx
from networkx.algorithms.bipartite import spectral_bipartivity as sb
from networkx.testing import almost_equal

# Examples from Figure 1 of
# E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
# bipartivity in complex networks", Phys. Rev. E 72, 046105 (2005).


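# Spectral bipartivity equals 1 for an exactly bipartite graph and decreases
# as edges added inside one node set introduce odd cycles; the expected
# values below follow the Figure 1 examples from the paper cited above.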
class TestSpectralBipartivity:
    @classmethod
    def setup_class(cls):
        global scipy
        scipy = pytest.importorskip("scipy")

    def test_star_like(self):
        # star-like
        G = nx.star_graph(2)
        G.add_edge(1, 2)
        assert almost_equal(sb(G), 0.843, places=3)

        G = nx.star_graph(3)
        G.add_edge(1, 2)
        assert almost_equal(sb(G), 0.871, places=3)

        G = nx.star_graph(4)
        G.add_edge(1, 2)
        assert almost_equal(sb(G), 0.890, places=3)

    def test_k23_like(self):
        # K2,3-like
        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        assert almost_equal(sb(G), 0.769, places=3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        assert almost_equal(sb(G), 0.829, places=3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        assert almost_equal(sb(G), 0.731, places=3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        G.add_edge(2, 4)
        assert almost_equal(sb(G), 0.692, places=3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(0, 1)
        assert almost_equal(sb(G), 0.645, places=3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(2, 3)
        assert almost_equal(sb(G), 0.645, places=3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(2, 3)
        G.add_edge(0, 1)
        assert almost_equal(sb(G), 0.597, places=3)

    def test_single_nodes(self):
        # single nodes
        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        sbn = sb(G, nodes=[1, 2])
        assert almost_equal(sbn[1], 0.85, places=2)
        assert almost_equal(sbn[2], 0.77, places=2)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        sbn = sb(G, nodes=[1, 2])
        assert almost_equal(sbn[1], 0.73, places=2)
        assert almost_equal(sbn[2], 0.82, places=2)