Fixed database typo and removed unnecessary class identifier.
This commit is contained in:
parent
00ad49a143
commit
45fb349a7d
5098 changed files with 952558 additions and 85 deletions
7
venv/Lib/site-packages/networkx/utils/__init__.py
Normal file
7
venv/Lib/site-packages/networkx/utils/__init__.py
Normal file
|
@ -0,0 +1,7 @@
|
|||
from networkx.utils.misc import *
|
||||
from networkx.utils.decorators import *
|
||||
from networkx.utils.random_sequence import *
|
||||
from networkx.utils.union_find import *
|
||||
from networkx.utils.rcm import *
|
||||
from networkx.utils.heaps import *
|
||||
from networkx.utils.contextmanagers import *
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
42
venv/Lib/site-packages/networkx/utils/contextmanagers.py
Normal file
42
venv/Lib/site-packages/networkx/utils/contextmanagers.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from contextlib import contextmanager
|
||||
import warnings
|
||||
|
||||
__all__ = ["reversed"]
|
||||
|
||||
|
||||
@contextmanager
def reversed(G):
    """Deprecated context manager that reverses a directed graph in place.

    For undirected graphs this is a no-op.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    Warning
    -------
    Deprecated in favor of ``G.reverse(copy=False)``: the view allows
    multiple threads to use the same graph without confusion, while this
    context manager does not.  Scheduled for removal in version 3.0.
    """
    msg = (
        "context manager reversed is deprecated and to be removed in 3.0."
        "Use G.reverse(copy=False) if G.is_directed() else G instead."
    )
    warnings.warn(msg, DeprecationWarning)

    is_directed = G.is_directed()
    if is_directed:
        # Swap predecessor/successor maps; adjacency follows successors.
        G._pred, G._succ = G._succ, G._pred
        G._adj = G._succ

    try:
        yield
    finally:
        if is_directed:
            # Reverse the reverse on exit, even if the body raised.
            G._pred, G._succ = G._succ, G._pred
            G._adj = G._succ
471
venv/Lib/site-packages/networkx/utils/decorators.py
Normal file
471
venv/Lib/site-packages/networkx/utils/decorators.py
Normal file
|
@ -0,0 +1,471 @@
|
|||
from collections import defaultdict
|
||||
from os.path import splitext
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
|
||||
import networkx as nx
|
||||
from decorator import decorator
|
||||
from networkx.utils import create_random_state, create_py_random_state
|
||||
|
||||
__all__ = [
|
||||
"not_implemented_for",
|
||||
"open_file",
|
||||
"nodes_or_number",
|
||||
"preserve_random_state",
|
||||
"random_state",
|
||||
"np_random_state",
|
||||
"py_random_state",
|
||||
]
|
||||
|
||||
|
||||
def not_implemented_for(*graph_types):
    """Decorator to mark algorithms as not implemented for some graph types.

    Parameters
    ----------
    graph_types : container of strings
        Entries must be one of 'directed', 'undirected', 'multigraph',
        'graph'.

    Returns
    -------
    _require : function
        The decorated function.

    Raises
    ------
    NetworkXNotImplemented
        If the graph matches all of the given types.

    Notes
    -----
    Multiple types are joined logically with "and".
    For "or" use multiple @not_implemented_for() lines.

    Examples
    --------
    Decorate functions like this::

       @not_implemented_for('directed')
       def sp_function(G):
           pass

       @not_implemented_for('directed', 'multigraph')
       def sp_np_function(G):
           pass
    """

    @decorator
    def _not_implemented_for(not_implement_for_func, *args, **kwargs):
        # The graph is always the first positional argument.
        graph = args[0]
        terms = {
            "directed": graph.is_directed(),
            "undirected": not graph.is_directed(),
            "multigraph": graph.is_multigraph(),
            "graph": not graph.is_multigraph(),
        }
        try:
            # All requested types must hold for the call to be rejected.
            match = all(terms[t] for t in graph_types)
        except KeyError as e:
            raise KeyError(
                "use one or more of " "directed, undirected, multigraph, graph"
            ) from e
        if match:
            msg = f"not implemented for {' '.join(graph_types)} type"
            raise nx.NetworkXNotImplemented(msg)
        return not_implement_for_func(*args, **kwargs)

    return _not_implemented_for
|
||||
|
||||
|
||||
def _open_gz(path, mode):
|
||||
import gzip
|
||||
|
||||
return gzip.open(path, mode=mode)
|
||||
|
||||
|
||||
def _open_bz2(path, mode):
|
||||
import bz2
|
||||
|
||||
return bz2.BZ2File(path, mode=mode)
|
||||
|
||||
|
||||
# Maps a filename extension to an opener taking (path, mode); unknown
# extensions fall back to the builtin open.  To support a new extension,
# define an opener function and register it here.
_dispatch_dict = defaultdict(lambda: open)
_dispatch_dict.update({".gz": _open_gz, ".gzip": _open_gz, ".bz2": _open_bz2})
|
||||
|
||||
|
||||
def open_file(path_arg, mode="r"):
    """Decorator to ensure clean opening and closing of files.

    Parameters
    ----------
    path_arg : int or str
        Location of the path argument in args (or its keyword name).
        Even if the argument is a named positional argument (with a
        default value), you must specify its index as a positional
        argument.
    mode : str
        String for opening mode.

    Returns
    -------
    _open_file : function
        Function which cleanly executes the io.

    Examples
    --------
    Decorate functions like this::

       @open_file(0, 'r')
       def read_function(pathname):
           pass

       @open_file(1, 'w')
       def write_function(G, pathname):
           pass

       @open_file(1, 'w')
       def write_function(G, pathname='graph.dot'):
           pass

       @open_file('path', 'w+')
       def another_function(arg, **kwargs):
           path = kwargs['path']
           pass
    """
    # This decorator handles a path argument given as a string, but NOT
    # the case where the function accepts a default of None and handles
    # it itself.  In that situation, the function must only close the
    # file object it opened itself, e.g.:
    #
    #    @open_file('path')
    #    def some_function(arg1, arg2, path=None):
    #        if path is None:
    #            fobj = tempfile.NamedTemporaryFile(delete=False)
    #            close_fobj = True
    #        else:
    #            # `path` is a file object opened (and later closed)
    #            # by the decorator.
    #            fobj = path
    #            close_fobj = False
    #        try:
    #            fobj.write('blah')
    #        finally:
    #            if close_fobj:
    #                fobj.close()
    #
    # A "with" block would undesirably close the decorator-provided file
    # object, hence the try/finally idiom above.

    @decorator
    def _open_file(func_to_be_decorated, *args, **kwargs):
        # Thanks to @decorator, *args/**kwargs are already resolved
        # against the wrapped function's signature, defaults included.
        # E.g. func(x, y, a=1, b=2, **kw) called as func(0, 1, b=5, c=10)
        # yields args=(0, 1, 1, 5) and kwargs={'c': 10}.

        # Locate the path argument: positional index or keyword name.
        try:
            # path_arg is a required positional argument; this indexing
            # works precisely because we are using @decorator.
            path = args[path_arg]
        except TypeError:
            # path_arg is a keyword name.  It must exist in kwargs,
            # either via a developer-specified default or because the
            # user set it explicitly.
            try:
                path = kwargs[path_arg]
            except KeyError as e:
                # No default in the signature and not supplied by caller.
                msg = f"Missing required keyword argument: {path_arg}"
                raise nx.NetworkXError(msg) from e
            else:
                is_kwarg = True
        except IndexError as e:
            # A required positional argument is missing: the decorator
            # itself was specified incorrectly (developer error, not
            # user error).
            msg = "path_arg of open_file decorator is incorrect"
            raise nx.NetworkXError(msg) from e
        else:
            is_kwarg = False

        # Two kinds of input: a path to open, or an already-open file.
        if isinstance(path, str):
            ext = splitext(path)[1]
            fobj = _dispatch_dict[ext](path, mode=mode)
            close_fobj = True
        elif hasattr(path, "read"):
            # Already a file-like object; leave ownership with caller.
            fobj = path
            close_fobj = False
        elif isinstance(path, Path):
            # A pathlib reference to a filename.
            fobj = _dispatch_dict[path.suffix](str(path), mode=mode)
            close_fobj = True
        else:
            # Could be None, in which case the algorithm deals with it.
            fobj = path
            close_fobj = False

        # Substitute the file object for the original argument.
        if is_kwarg:
            new_args = args
            kwargs[path_arg] = fobj
        else:
            # args is a tuple; convert to list before modifying.
            new_args = list(args)
            new_args[path_arg] = fobj

        # Call the wrapped function, always closing a file we opened.
        try:
            result = func_to_be_decorated(*new_args, **kwargs)
        finally:
            if close_fobj:
                fobj.close()

        return result

    return _open_file
|
||||
|
||||
|
||||
def nodes_or_number(which_args):
    """Decorator to allow number of nodes or container of nodes.

    Parameters
    ----------
    which_args : int or sequence of ints
        Location of the node arguments in args.  Even if the argument is
        a named positional argument (with a default value), you must
        specify its index as a positional argument.
        If more than one node argument is allowed, can be a list of
        locations.

    Returns
    -------
    _nodes_or_numbers : function
        Function which replaces int args with (n, range(n)) pairs.

    Examples
    --------
    Decorate functions like this::

       @nodes_or_number(0)
       def empty_graph(nodes):
           pass

       @nodes_or_number([0, 1])
       def grid_2d_graph(m1, m2, periodic=False):
           pass

       @nodes_or_number(1)
       def full_rary_tree(r, n):
           # r is a number. n can be a number or a list of nodes
           pass
    """

    @decorator
    def _nodes_or_number(func_to_be_decorated, *args, **kw):
        # Form a tuple of the argument positions to be converted.
        try:
            iter_wa = iter(which_args)
        except TypeError:
            iter_wa = (which_args,)
        # Change each argument in turn.
        new_args = list(args)
        for i in iter_wa:
            n = args[i]
            try:
                nodes = list(range(n))
            except TypeError:
                # Not a number: treat as a container of nodes.
                nodes = tuple(n)
            else:
                if n < 0:
                    # BUG FIX: the message previously lacked the f-prefix,
                    # so the literal text "{n}" appeared in the error
                    # instead of the offending value.
                    msg = f"Negative number of nodes not valid: {n}"
                    raise nx.NetworkXError(msg)
            new_args[i] = (n, nodes)
        return func_to_be_decorated(*new_args, **kw)

    return _nodes_or_number
|
||||
|
||||
|
||||
def preserve_random_state(func):
    """Decorator that preserves the numpy.random state during a function.

    Parameters
    ----------
    func : function
        function around which to preserve the random state.

    Returns
    -------
    wrapper : function
        Function which wraps the input function by saving the state
        before calling the function and restoring it afterward.

    Examples
    --------
    Decorate functions like this::

       @preserve_random_state
       def do_random_stuff(x, y):
           return x + y * numpy.random.random()

    Notes
    -----
    If numpy.random is not importable, the state is not saved or
    restored and *func* is returned unchanged.
    """
    try:
        from numpy.random import get_state, seed, set_state

        @contextmanager
        def save_random_state():
            # Capture the global RNG state; restore it no matter how the
            # body exits.
            state = get_state()
            try:
                yield
            finally:
                set_state(state)

        def wrapper(*args, **kwargs):
            with save_random_state():
                # Fixed seed so the wrapped call is reproducible.
                seed(1234567890)
                return func(*args, **kwargs)

        wrapper.__name__ = func.__name__
        return wrapper
    except ImportError:
        return func
|
||||
|
||||
|
||||
def random_state(random_state_index):
    """Decorator to generate a numpy.random.RandomState instance.

    The argument at position `random_state_index` is processed by
    create_random_state; the result is a numpy.random.RandomState
    instance.

    Parameters
    ----------
    random_state_index : int
        Location of the random_state argument in args that is to be used
        to generate the numpy.random.RandomState instance.  Even if the
        argument is a named positional argument (with a default value),
        you must specify its index as a positional argument.

    Returns
    -------
    _random_state : function
        Function whose random_state keyword argument is a RandomState
        instance.

    Examples
    --------
    Decorate functions like this::

       @np_random_state(0)
       def random_float(random_state=None):
           return random_state.rand()

       @np_random_state(1)
       def random_array(dims, random_state=1):
           return random_state.rand(*dims)

    See Also
    --------
    py_random_state
    """

    @decorator
    def _random_state(func, *args, **kwargs):
        # Validate the decorator argument.
        try:
            random_state_arg = args[random_state_index]
        except TypeError as e:
            raise nx.NetworkXError("random_state_index must be an integer") from e
        except IndexError as e:
            raise nx.NetworkXError("random_state_index is incorrect") from e

        # args is a tuple; convert to list before substituting the
        # freshly-created RandomState instance.
        new_args = list(args)
        new_args[random_state_index] = create_random_state(random_state_arg)
        return func(*new_args, **kwargs)

    return _random_state


# Public alias used throughout networkx.
np_random_state = random_state
|
||||
|
||||
|
||||
def py_random_state(random_state_index):
    """Decorator to generate a random.Random instance (or equiv).

    The argument at position `random_state_index` is processed by
    create_py_random_state.  The result is either a random.Random
    instance, or a numpy.random.RandomState instance with additional
    attributes to mimic basic methods of Random.

    Parameters
    ----------
    random_state_index : int
        Location of the random_state argument in args that is to be used
        to generate the random-state instance.  Even if the argument is
        a named positional argument (with a default value), you must
        specify its index as a positional argument.

    Returns
    -------
    _random_state : function
        Function whose random_state keyword argument is a RandomState
        instance.

    Examples
    --------
    Decorate functions like this::

       @py_random_state(0)
       def random_float(random_state=None):
           return random_state.rand()

       @py_random_state(1)
       def random_array(dims, random_state=1):
           return random_state.rand(*dims)

    See Also
    --------
    np_random_state
    """

    @decorator
    def _random_state(func, *args, **kwargs):
        # Validate the decorator argument.
        try:
            random_state_arg = args[random_state_index]
        except TypeError as e:
            raise nx.NetworkXError("random_state_index must be an integer") from e
        except IndexError as e:
            raise nx.NetworkXError("random_state_index is incorrect") from e

        # args is a tuple; convert to list before substituting the
        # freshly-created random-state instance.
        new_args = list(args)
        new_args[random_state_index] = create_py_random_state(random_state_arg)
        return func(*new_args, **kwargs)

    return _random_state
|
368
venv/Lib/site-packages/networkx/utils/heaps.py
Normal file
368
venv/Lib/site-packages/networkx/utils/heaps.py
Normal file
|
@ -0,0 +1,368 @@
|
|||
"""
|
||||
Min-heaps.
|
||||
"""
|
||||
|
||||
from heapq import heappop, heappush
|
||||
from itertools import count
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"]
|
||||
|
||||
|
||||
class MinHeap:
    """Base class for min-heaps.

    A MinHeap stores a collection of key-value pairs ordered by their
    values.  It supports querying the minimum pair, inserting a new pair,
    decreasing the value in an existing pair, and deleting the minimum
    pair.
    """

    class _Item:
        """A key-value pair, used by subclasses to represent entries."""

        __slots__ = ("key", "value")

        def __init__(self, key, value):
            self.key = key
            self.value = value

        def __repr__(self):
            return repr((self.key, self.value))

    def __init__(self):
        """Initialize a new, empty min-heap."""
        # Maps each key to its entry; subclasses decide the entry type.
        self._dict = {}

    def min(self):
        """Query the minimum key-value pair.

        Returns
        -------
        key, value : tuple
            The key-value pair with the minimum value in the heap.

        Raises
        ------
        NetworkXError
            If the heap is empty.
        """
        raise NotImplementedError

    def pop(self):
        """Delete and return the minimum key-value pair.

        Returns
        -------
        key, value : tuple
            The key-value pair with the minimum value in the heap.

        Raises
        ------
        NetworkXError
            If the heap is empty.
        """
        raise NotImplementedError

    def get(self, key, default=None):
        """Return the value associated with a key.

        Parameters
        ----------
        key : hashable object
            The key to be looked up.

        default : object
            Default value to return if the key is not present in the
            heap.  Default value: None.

        Returns
        -------
        value : object.
            The value associated with the key.
        """
        raise NotImplementedError

    def insert(self, key, value, allow_increase=False):
        """Insert a new key-value pair or modify the value in an
        existing pair.

        Parameters
        ----------
        key : hashable object
            The key.

        value : object comparable with existing values.
            The value.

        allow_increase : bool
            Whether the value is allowed to increase.  If False,
            attempts to increase an existing value have no effect.
            Default value: False.

        Returns
        -------
        decreased : bool
            True if a pair is inserted or the existing value is
            decreased.
        """
        raise NotImplementedError

    def __nonzero__(self):
        """Return whether the heap is nonempty (Python 2 protocol)."""
        return bool(self._dict)

    def __bool__(self):
        """Return whether the heap is nonempty."""
        return bool(self._dict)

    def __len__(self):
        """Return the number of key-value pairs in the heap."""
        return len(self._dict)

    def __contains__(self, key):
        """Return whether a key exists in the heap.

        Parameters
        ----------
        key : any hashable object.
            The key to be looked up.
        """
        return key in self._dict
|
||||
|
||||
|
||||
def _inherit_doc(cls):
|
||||
"""Decorator for inheriting docstrings from base classes.
|
||||
"""
|
||||
|
||||
def func(fn):
|
||||
fn.__doc__ = cls.__dict__[fn.__name__].__doc__
|
||||
return fn
|
||||
|
||||
return func
|
||||
|
||||
|
||||
class PairingHeap(MinHeap):
    """A pairing heap."""

    class _Node(MinHeap._Item):
        """A node in a pairing heap.

        A tree in a pairing heap is stored using the left-child,
        right-sibling representation.
        """

        __slots__ = ("left", "next", "prev", "parent")

        def __init__(self, key, value):
            super().__init__(key, value)
            self.left = None    # the leftmost child
            self.next = None    # the next sibling
            self.prev = None    # the previous sibling
            self.parent = None  # the parent

    def __init__(self):
        """Initialize an empty pairing heap."""
        super().__init__()
        self._root = None

    @_inherit_doc(MinHeap)
    def min(self):
        if self._root is None:
            raise nx.NetworkXError("heap is empty.")
        return (self._root.key, self._root.value)

    @_inherit_doc(MinHeap)
    def pop(self):
        if self._root is None:
            raise nx.NetworkXError("heap is empty.")
        min_node = self._root
        self._root = self._merge_children(self._root)
        del self._dict[min_node.key]
        return (min_node.key, min_node.value)

    @_inherit_doc(MinHeap)
    def get(self, key, default=None):
        node = self._dict.get(key)
        return default if node is None else node.value

    @_inherit_doc(MinHeap)
    def insert(self, key, value, allow_increase=False):
        node = self._dict.get(key)
        root = self._root
        if node is not None:
            if value < node.value:
                # Decrease: detach the node if it now beats its parent
                # and re-link it with the root.
                node.value = value
                if node is not root and value < node.parent.value:
                    self._cut(node)
                    self._root = self._link(root, node)
                return True
            elif allow_increase and value > node.value:
                node.value = value
                child = self._merge_children(node)
                # Nonstandard step: link the merged subtree with the
                # root.  The standard alternative would perform a
                # decrease followed by a pop (as if the value were the
                # smallest in the heap) and then re-insert the new value.
                if child is not None:
                    self._root = self._link(self._root, child)
                return False
            # NOTE: equal value, or increase with allow_increase=False,
            # intentionally falls through and returns None (falsy).
        else:
            # Insert a new key.
            node = self._Node(key, value)
            self._dict[key] = node
            self._root = node if root is None else self._link(root, node)
            return True

    def _link(self, root, other):
        """Link two nodes, making the one with the smaller value the
        parent of the other.
        """
        if other.value < root.value:
            root, other = other, root
        first_child = root.left
        other.next = first_child
        if first_child is not None:
            first_child.prev = other
        other.prev = None
        root.left = other
        other.parent = root
        return root

    def _merge_children(self, root):
        """Merge the subtrees of the root using the standard two-pass
        method.  The resulting subtree is detached from the root.
        """
        node = root.left
        root.left = None
        if node is not None:
            link = self._link
            # Pass 1: merge pairs of consecutive subtrees from left to
            # right.  Afterwards only the prev pointers of the resulting
            # subtrees are meaningful; pass 2 fixes the others.
            prev = None
            while True:
                sibling = node.next
                if sibling is None:
                    node.prev = prev
                    break
                after = sibling.next
                node = link(node, sibling)
                node.prev = prev
                prev = node
                if after is None:
                    break
                node = after
            # Pass 2: successively merge the pass-1 subtrees, right to
            # left, into the rightmost one.
            prev = node.prev
            while prev is not None:
                before = prev.prev
                node = link(prev, node)
                prev = before
            # node becomes the new root; clear its parent and siblings.
            node.prev = None
            node.next = None
            node.parent = None
        return node

    def _cut(self, node):
        """Cut a node from its parent."""
        prev = node.prev
        sibling = node.next
        if prev is not None:
            prev.next = sibling
        else:
            node.parent.left = sibling
        node.prev = None
        if sibling is not None:
            sibling.prev = prev
        node.next = None
        node.parent = None
|
||||
|
||||
|
||||
class BinaryHeap(MinHeap):
    """A binary heap using lazy deletion of stale entries."""

    def __init__(self):
        """Initialize an empty binary heap."""
        super().__init__()
        # The heap holds (value, tiebreaker, key) triples; the counter
        # breaks ties between equal values with insertion order.
        self._heap = []
        self._count = count()

    @_inherit_doc(MinHeap)
    def min(self):
        mapping = self._dict
        if not mapping:
            raise nx.NetworkXError("heap is empty")
        heap = self._heap
        # Discard stale key-value pairs until an up-to-date one surfaces.
        while True:
            value, _, key = heap[0]
            if key in mapping and value == mapping[key]:
                break
            heappop(heap)
        return (key, value)

    @_inherit_doc(MinHeap)
    def pop(self):
        mapping = self._dict
        if not mapping:
            raise nx.NetworkXError("heap is empty")
        heap = self._heap
        # Discard stale key-value pairs until an up-to-date one surfaces.
        while True:
            value, _, key = heap[0]
            heappop(heap)
            if key in mapping and value == mapping[key]:
                break
        del mapping[key]
        return (key, value)

    @_inherit_doc(MinHeap)
    def get(self, key, default=None):
        return self._dict.get(key, default)

    @_inherit_doc(MinHeap)
    def insert(self, key, value, allow_increase=False):
        mapping = self._dict
        if key in mapping:
            old_value = mapping[key]
            if value < old_value or (allow_increase and value > old_value):
                # There is no efficient way to locate an existing pair
                # inside the heap, so push a fresh pair and deem the old
                # one stale; stale pairs are skipped by min()/pop().
                mapping[key] = value
                heappush(self._heap, (value, next(self._count), key))
                return value < old_value
            return False
        mapping[key] = value
        heappush(self._heap, (value, next(self._count), key))
        return True
|
182
venv/Lib/site-packages/networkx/utils/mapped_queue.py
Normal file
182
venv/Lib/site-packages/networkx/utils/mapped_queue.py
Normal file
|
@ -0,0 +1,182 @@
|
|||
"""Priority queue class with updatable priorities.
|
||||
"""
|
||||
|
||||
import heapq
|
||||
|
||||
__all__ = ["MappedQueue"]
|
||||
|
||||
|
||||
class MappedQueue:
|
||||
"""The MappedQueue class implements an efficient minimum heap. The
|
||||
smallest element can be popped in O(1) time, new elements can be pushed
|
||||
in O(log n) time, and any element can be removed or updated in O(log n)
|
||||
time. The queue cannot contain duplicate elements and an attempt to push an
|
||||
element already in the queue will have no effect.
|
||||
|
||||
MappedQueue complements the heapq package from the python standard
|
||||
library. While MappedQueue is designed for maximum compatibility with
|
||||
heapq, it has slightly different functionality.
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
A `MappedQueue` can be created empty or optionally given an array of
|
||||
initial elements. Calling `push()` will add an element and calling `pop()`
|
||||
will remove and return the smallest element.
|
||||
|
||||
>>> q = MappedQueue([916, 50, 4609, 493, 237])
|
||||
>>> q.push(1310)
|
||||
True
|
||||
>>> x = [q.pop() for i in range(len(q.h))]
|
||||
>>> x
|
||||
[50, 237, 493, 916, 1310, 4609]
|
||||
|
||||
Elements can also be updated or removed from anywhere in the queue.
|
||||
|
||||
>>> q = MappedQueue([916, 50, 4609, 493, 237])
|
||||
>>> q.remove(493)
|
||||
>>> q.update(237, 1117)
|
||||
>>> x = [q.pop() for i in range(len(q.h))]
|
||||
>>> x
|
||||
[50, 916, 1117, 4609]
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001).
|
||||
Introduction to algorithms second edition.
|
||||
.. [2] Knuth, D. E. (1997). The art of computer programming (Vol. 3).
|
||||
Pearson Education.
|
||||
"""
|
||||
|
||||
def __init__(self, data=[]):
|
||||
"""Priority queue class with updatable priorities.
|
||||
"""
|
||||
self.h = list(data)
|
||||
self.d = dict()
|
||||
self._heapify()
|
||||
|
||||
def __len__(self):
|
||||
return len(self.h)
|
||||
|
||||
def _heapify(self):
|
||||
"""Restore heap invariant and recalculate map."""
|
||||
heapq.heapify(self.h)
|
||||
self.d = {elt: pos for pos, elt in enumerate(self.h)}
|
||||
if len(self.h) != len(self.d):
|
||||
raise AssertionError("Heap contains duplicate elements")
|
||||
|
||||
def push(self, elt):
|
||||
"""Add an element to the queue."""
|
||||
# If element is already in queue, do nothing
|
||||
if elt in self.d:
|
||||
return False
|
||||
# Add element to heap and dict
|
||||
pos = len(self.h)
|
||||
self.h.append(elt)
|
||||
self.d[elt] = pos
|
||||
# Restore invariant by sifting down
|
||||
self._siftdown(pos)
|
||||
return True
|
||||
|
||||
def pop(self):
|
||||
"""Remove and return the smallest element in the queue."""
|
||||
# Remove smallest element
|
||||
elt = self.h[0]
|
||||
del self.d[elt]
|
||||
# If elt is last item, remove and return
|
||||
if len(self.h) == 1:
|
||||
self.h.pop()
|
||||
return elt
|
||||
# Replace root with last element
|
||||
last = self.h.pop()
|
||||
self.h[0] = last
|
||||
self.d[last] = 0
|
||||
# Restore invariant by sifting up, then down
|
||||
pos = self._siftup(0)
|
||||
self._siftdown(pos)
|
||||
# Return smallest element
|
||||
return elt
|
||||
|
||||
def update(self, elt, new):
|
||||
"""Replace an element in the queue with a new one."""
|
||||
# Replace
|
||||
pos = self.d[elt]
|
||||
self.h[pos] = new
|
||||
del self.d[elt]
|
||||
self.d[new] = pos
|
||||
# Restore invariant by sifting up, then down
|
||||
pos = self._siftup(pos)
|
||||
self._siftdown(pos)
|
||||
|
||||
def remove(self, elt):
|
||||
"""Remove an element from the queue."""
|
||||
# Find and remove element
|
||||
try:
|
||||
pos = self.d[elt]
|
||||
del self.d[elt]
|
||||
except KeyError:
|
||||
# Not in queue
|
||||
raise
|
||||
# If elt is last item, remove and return
|
||||
if pos == len(self.h) - 1:
|
||||
self.h.pop()
|
||||
return
|
||||
# Replace elt with last element
|
||||
last = self.h.pop()
|
||||
self.h[pos] = last
|
||||
self.d[last] = pos
|
||||
# Restore invariant by sifting up, then down
|
||||
pos = self._siftup(pos)
|
||||
self._siftdown(pos)
|
||||
|
||||
    def _siftup(self, pos):
        """Move element at pos down to a leaf by repeatedly moving the smaller
        child up.

        Returns the final position of the element.  Note this sifts all the
        way to a leaf unconditionally; the caller follows with _siftdown to
        settle the element at its correct level.
        """
        h, d = self.h, self.d
        elt = h[pos]
        # Continue until element is in a leaf
        end_pos = len(h)
        left_pos = (pos << 1) + 1
        while left_pos < end_pos:
            # Left child is guaranteed to exist by loop predicate
            left = h[left_pos]
            try:
                right_pos = left_pos + 1
                right = h[right_pos]
                # Out-of-place, swap with left unless right is smaller
                if right < left:
                    h[pos], h[right_pos] = right, elt
                    pos, right_pos = right_pos, pos
                    d[elt], d[right] = pos, right_pos
                else:
                    h[pos], h[left_pos] = left, elt
                    pos, left_pos = left_pos, pos
                    d[elt], d[left] = pos, left_pos
            except IndexError:
                # Left leaf is the end of the heap, swap
                h[pos], h[left_pos] = left, elt
                pos, left_pos = left_pos, pos
                d[elt], d[left] = pos, left_pos
            # Update left_pos
            left_pos = (pos << 1) + 1
        return pos
|
||||
|
||||
    def _siftdown(self, pos):
        """Restore invariant by repeatedly replacing out-of-place element with
        its parent.

        Returns the final position of the element.
        """
        h, d = self.h, self.d
        elt = h[pos]
        # Continue until element is at root
        while pos > 0:
            # Parent index in the implicit binary heap.
            parent_pos = (pos - 1) >> 1
            parent = h[parent_pos]
            if parent > elt:
                # Swap out-of-place element with parent
                h[parent_pos], h[pos] = elt, parent
                parent_pos, pos = pos, parent_pos
                d[elt] = pos
                d[parent] = parent_pos
            else:
                # Invariant is satisfied
                break
        return pos
|
415
venv/Lib/site-packages/networkx/utils/misc.py
Normal file
415
venv/Lib/site-packages/networkx/utils/misc.py
Normal file
|
@ -0,0 +1,415 @@
|
|||
"""
|
||||
Miscellaneous Helpers for NetworkX.
|
||||
|
||||
These are not imported into the base networkx namespace but
|
||||
can be accessed, for example, as
|
||||
|
||||
>>> import networkx
|
||||
>>> networkx.utils.is_list_of_ints([1, 2, 3])
|
||||
True
|
||||
>>> networkx.utils.is_list_of_ints([1, 2, "spam"])
|
||||
False
|
||||
"""
|
||||
|
||||
from collections import defaultdict
|
||||
from collections import deque
|
||||
import warnings
|
||||
import sys
|
||||
import uuid
|
||||
from itertools import tee, chain
|
||||
import networkx as nx
|
||||
|
||||
|
||||
# some cookbook stuff
|
||||
# used in deciding whether something is a bunch of nodes, edges, etc.
|
||||
# see G.add_nodes and others in Graph Class in networkx/base.py
|
||||
|
||||
|
||||
def is_string_like(obj):  # from John Hunter, types-free version
    """Check if obj is string.

    .. deprecated:: 2.6
       Use ``isinstance(obj, str)`` directly.
    """
    warnings.warn(
        "is_string_like is deprecated and will be removed in 3.0."
        "Use isinstance(obj, str) instead.",
        DeprecationWarning,
    )
    return isinstance(obj, str)
|
||||
|
||||
|
||||
def iterable(obj):
    """Return True if obj is iterable with a well-defined len()."""
    if hasattr(obj, "__iter__"):
        return True
    try:
        len(obj)
    except TypeError:
        # len() is undefined for this object, so it is not iterable in the
        # sense used here.  A bare ``except:`` (as before) would also have
        # swallowed KeyboardInterrupt/SystemExit; catch only TypeError.
        return False
    return True
|
||||
|
||||
|
||||
def empty_generator():
    """Return a generator that yields no members."""
    # A bare return before the (unreachable) yield makes this function a
    # generator that is exhausted immediately.
    return
    yield
|
||||
|
||||
|
||||
def flatten(obj, result=None):
    """Return flattened version of (possibly nested) iterable object.

    Strings are treated as atoms even though they are iterable.  The
    result is rebuilt with the outermost object's class.
    """
    # Use isinstance(obj, str) directly instead of the deprecated
    # is_string_like helper, which emitted a DeprecationWarning on every
    # call; the check is behaviorally identical.
    if not iterable(obj) or isinstance(obj, str):
        return obj
    if result is None:
        result = []
    for item in obj:
        if not iterable(item) or isinstance(item, str):
            result.append(item)
        else:
            # Recurse, accumulating into the shared result list.
            flatten(item, result)
    return obj.__class__(result)
|
||||
|
||||
|
||||
def make_list_of_ints(sequence):
    """Return list of ints from sequence of integral numbers.

    All elements of the sequence must satisfy int(element) == element
    or a ValueError is raised. Sequence is iterated through once.

    If sequence is a list, the non-int values are replaced with ints.
    So, no new list is created
    """
    if not isinstance(sequence, list):
        # Non-list inputs: build and return a brand-new list of ints.
        result = []
        for i in sequence:
            errmsg = f"sequence is not all integers: {i}"
            try:
                ii = int(i)
            except ValueError:
                raise nx.NetworkXError(errmsg) from None
            if ii != i:
                # int(i) succeeded but lost information (e.g. 2.5 -> 2).
                raise nx.NetworkXError(errmsg)
            result.append(ii)
        return result
    # original sequence is a list... in-place conversion to ints
    for indx, i in enumerate(sequence):
        errmsg = f"sequence is not all integers: {i}"
        if isinstance(i, int):
            # Already an int: leave the element untouched.
            continue
        try:
            ii = int(i)
        except ValueError:
            raise nx.NetworkXError(errmsg) from None
        if ii != i:
            raise nx.NetworkXError(errmsg)
        sequence[indx] = ii
    return sequence
|
||||
|
||||
|
||||
def is_list_of_ints(intlist):
    """Return True if list is a list of ints."""
    # Must be a real list (not a tuple or other sequence) whose every
    # element is an int.
    if not isinstance(intlist, list):
        return False
    return all(isinstance(item, int) for item in intlist)
|
||||
|
||||
|
||||
def make_str(x):
    """Returns the string representation of x.

    .. deprecated:: 2.6
       Use ``str(x)`` directly.
    """
    msg = "make_str is deprecated and will be removed in 3.0. Use str instead."
    warnings.warn(msg, DeprecationWarning)
    return str(x)
|
||||
|
||||
|
||||
def generate_unique_node():
    """Generate a unique node label.

    Uses ``uuid.uuid1`` (host- and time-based), so successive calls return
    distinct strings.
    """
    return str(uuid.uuid1())
|
||||
|
||||
|
||||
def default_opener(filename):
    """Opens `filename` using system's default program.

    Parameters
    ----------
    filename : str
        The path of the file to be opened.

    Raises
    ------
    KeyError
        If ``sys.platform`` is not one of the platforms listed below.
    """
    from subprocess import call

    # Map sys.platform to the command that opens a file with the platform's
    # default application.
    cmds = {
        "darwin": ["open"],
        "linux": ["xdg-open"],
        "linux2": ["xdg-open"],
        "win32": ["cmd.exe", "/C", "start", ""],
    }
    cmd = cmds[sys.platform] + [filename]
    call(cmd)
|
||||
|
||||
|
||||
def dict_to_numpy_array(d, mapping=None):
    """Convert a dictionary of dictionaries to a numpy array
    with optional mapping.

    Tries the 2d (dict-of-dicts) conversion first and falls back to the
    1d (dict-of-numbers) conversion.
    """
    try:
        return dict_to_numpy_array2(d, mapping)
    except (AttributeError, TypeError):
        # AttributeError is when no mapping was provided and v.keys() fails.
        # TypeError is when a mapping was provided and d[k1][k2] fails.
        return dict_to_numpy_array1(d, mapping)
|
||||
|
||||
|
||||
def dict_to_numpy_array2(d, mapping=None):
    """Convert a dictionary of dictionaries to a 2d numpy array
    with optional mapping.

    Missing inner entries are left as zero in the returned array.
    """
    import numpy

    if mapping is None:
        # Collect every key appearing as a row or column label and assign
        # each an index.
        keys = set(d.keys())
        for row in d.values():
            keys.update(row.keys())
        mapping = {key: index for index, key in enumerate(keys)}
    n = len(mapping)
    a = numpy.zeros((n, n))
    for k1, i in mapping.items():
        for k2, j in mapping.items():
            try:
                a[i, j] = d[k1][k2]
            except KeyError:
                # Entry absent from d: the cell keeps its zero default.
                pass
    return a
|
||||
|
||||
|
||||
def dict_to_numpy_array1(d, mapping=None):
    """Convert a dictionary of numbers to a 1d numpy array
    with optional mapping.
    """
    import numpy

    if mapping is None:
        # Assign each key an index in (arbitrary but consistent) set order.
        s = set(d.keys())
        mapping = dict(zip(s, range(len(s))))
    n = len(mapping)
    a = numpy.zeros(n)
    for k1, i in mapping.items():
        # i is already the index for k1; the original redundantly
        # re-looked it up via ``i = mapping[k1]`` inside the loop.
        a[i] = d[k1]
    return a
|
||||
|
||||
|
||||
def is_iterator(obj):
    """Returns True if and only if the given object is an iterator
    object.
    """
    # Note: ``iter(obj)`` raises TypeError for non-iterables; that is
    # deliberately left to propagate, matching the original behavior.
    supports_next = hasattr(obj, "__next__") or hasattr(obj, "next")
    return iter(obj) is obj and supports_next
|
||||
|
||||
|
||||
def arbitrary_element(iterable):
    """Returns an arbitrary element of `iterable` without removing it.

    This is most useful for "peeking" at an arbitrary element of a set,
    but can be used for any list, dictionary, etc., as well::

        >>> arbitrary_element({3, 2, 1})
        1
        >>> arbitrary_element("hello")
        'h'

    This function raises a :exc:`ValueError` if `iterable` is an
    iterator (because the current implementation of this function would
    consume an element from the iterator)::

        >>> iterator = iter([1, 2, 3])
        >>> arbitrary_element(iterator)
        Traceback (most recent call last):
            ...
        ValueError: cannot return an arbitrary item from an iterator

    """
    if is_iterator(iterable):
        # Peeking at an iterator would consume its next element, which
        # callers do not expect; refuse instead of silently mutating it.
        raise ValueError("cannot return an arbitrary item from an iterator")
    # Another possible implementation is ``for x in iterable: return x``.
    return next(iter(iterable))
|
||||
|
||||
|
||||
# Recipe from the itertools documentation.
|
||||
def consume(iterator):
    "Consume the iterator entirely."
    # A zero-length deque discards every item at C speed (itertools
    # documentation recipe).
    deque(iterator, maxlen=0)
|
||||
|
||||
|
||||
# Recipe from the itertools documentation.
|
||||
def pairwise(iterable, cyclic=False):
    "s -> (s0, s1), (s1, s2), (s2, s3), ..."
    a, b = tee(iterable)
    # Advance the second iterator by one, remembering the head element.
    first = next(b, None)
    if cyclic is True:
        # Close the cycle: the final pair wraps from the last element
        # back to the first.
        return zip(a, chain(b, (first,)))
    return zip(a, b)
|
||||
|
||||
|
||||
def groups(many_to_one):
    """Converts a many-to-one mapping into a one-to-many mapping.

    `many_to_one` must be a dictionary whose keys and values are all
    :term:`hashable`.

    The return value is a dictionary mapping values from `many_to_one`
    to sets of keys from `many_to_one` that have that value.

    For example::

        >>> from networkx.utils import groups
        >>> many_to_one = {"a": 1, "b": 1, "c": 2, "d": 3, "e": 3}
        >>> groups(many_to_one)  # doctest: +SKIP
        {1: {'a', 'b'}, 2: {'c'}, 3: {'d', 'e'}}

    """
    # Invert the mapping: every key is filed under the set for its value.
    one_to_many = defaultdict(set)
    for key, value in many_to_one.items():
        one_to_many[value].add(key)
    return dict(one_to_many)
|
||||
|
||||
|
||||
def to_tuple(x):
    """Converts lists to tuples.

    For example::

        >>> from networkx.utils import to_tuple
        >>> a_list = [1, 2, [1, 4]]
        >>> to_tuple(a_list)
        (1, 2, (1, 4))

    """
    # Recursively convert nested lists/tuples; anything else is returned
    # unchanged.
    if isinstance(x, (tuple, list)):
        return tuple(to_tuple(item) for item in x)
    return x
|
||||
|
||||
|
||||
def create_random_state(random_state=None):
    """Returns a numpy.random.RandomState instance depending on input.

    Parameters
    ----------
    random_state : int or RandomState instance or None optional (default=None)
        If int, return a numpy.random.RandomState instance set with seed=int.
        if numpy.random.RandomState instance, return it.
        if None or numpy.random, return the global random number generator used
        by numpy.random.
    """
    import numpy as np

    if random_state is None or random_state is np.random:
        # The module-level generator backing the numpy.random functions.
        return np.random.mtrand._rand
    if isinstance(random_state, np.random.RandomState):
        return random_state
    if isinstance(random_state, int):
        return np.random.RandomState(random_state)
    raise ValueError(
        f"{random_state} cannot be used to generate a numpy.random.RandomState instance"
    )
|
||||
|
||||
|
||||
class PythonRandomInterface:
    """Adapter exposing a ``random.Random``-style API on top of a
    ``numpy.random.RandomState`` generator.

    Only the subset of methods used by NetworkX is implemented; some
    methods deliberately deviate from the numpy RandomState API to match
    the stdlib ``random`` semantics (see comments below).
    """

    def __init__(self, rng=None):
        # Two bugs fixed relative to the original:
        #  * the try/except ImportError wrapped the *definition* of
        #    __init__ at class-body level, where it could never fire --
        #    the import happens at call time, so the guard belongs here;
        #  * ``self._rng`` was unconditionally overwritten with ``rng``,
        #    so the rng=None default left self._rng set to None.
        if rng is None:
            try:
                import numpy
            except ImportError:
                msg = "numpy not found, only random.random available."
                warnings.warn(msg, ImportWarning)
                raise
            rng = numpy.random.mtrand._rand
        self._rng = rng

    def random(self):
        # random.random() analogue: float in [0, 1).
        return self._rng.random_sample()

    def uniform(self, a, b):
        return a + (b - a) * self._rng.random_sample()

    def randrange(self, a, b=None):
        # Like random.randrange: b exclusive.
        return self._rng.randint(a, b)

    def choice(self, seq):
        return seq[self._rng.randint(0, len(seq))]

    def gauss(self, mu, sigma):
        return self._rng.normal(mu, sigma)

    def shuffle(self, seq):
        return self._rng.shuffle(seq)

    # Some methods don't match API for numpy RandomState.
    # Commented out versions are not used by NetworkX

    def sample(self, seq, k):
        return self._rng.choice(list(seq), size=(k,), replace=False)

    def randint(self, a, b):
        # stdlib randint is inclusive of b; numpy's is exclusive.
        return self._rng.randint(a, b + 1)

    # exponential as expovariate with 1/argument,
    def expovariate(self, scale):
        return self._rng.exponential(1 / scale)

    # pareto as paretovariate with 1/argument,
    def paretovariate(self, shape):
        return self._rng.pareto(shape)

    # weibull as weibullvariate multiplied by beta,
    # def weibullvariate(self, alpha, beta):
    #     return self._rng.weibull(alpha) * beta
    #
    # def triangular(self, low, high, mode):
    #     return self._rng.triangular(low, mode, high)
    #
    # def choices(self, seq, weights=None, cum_weights=None, k=1):
    #     return self._rng.choice(seq
|
||||
|
||||
|
||||
def create_py_random_state(random_state=None):
    """Returns a random.Random instance depending on input.

    Parameters
    ----------
    random_state : int or random number generator or None (default=None)
        If int, return a random.Random instance set with seed=int.
        if random.Random instance, return it.
        if None or the `random` package, return the global random number
        generator used by `random`.
        if np.random package, return the global numpy random number
        generator wrapped in a PythonRandomInterface class.
        if np.random.RandomState instance, return it wrapped in
        PythonRandomInterface
        if a PythonRandomInterface instance, return it
    """
    import random

    try:
        import numpy as np

        # numpy-backed generators are wrapped so they present the
        # random.Random API expected by callers.
        if random_state is np.random:
            return PythonRandomInterface(np.random.mtrand._rand)
        if isinstance(random_state, np.random.RandomState):
            return PythonRandomInterface(random_state)
        if isinstance(random_state, PythonRandomInterface):
            return random_state
    except ImportError:
        # numpy is optional; fall through to the pure-python cases.
        pass

    if random_state is None or random_state is random:
        # The module-level Random instance backing random.random() etc.
        return random._inst
    if isinstance(random_state, random.Random):
        return random_state
    if isinstance(random_state, int):
        return random.Random(random_state)
    msg = f"{random_state} cannot be used to generate a random.Random instance"
    raise ValueError(msg)
|
155
venv/Lib/site-packages/networkx/utils/random_sequence.py
Normal file
155
venv/Lib/site-packages/networkx/utils/random_sequence.py
Normal file
|
@ -0,0 +1,155 @@
|
|||
"""
|
||||
Utilities for generating random numbers, random sequences, and
|
||||
random selections.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import py_random_state
|
||||
|
||||
|
||||
# The same helpers for choosing random sequences from distributions
|
||||
# uses Python's random module
|
||||
# https://docs.python.org/3/library/random.html
|
||||
|
||||
|
||||
@py_random_state(2)
def powerlaw_sequence(n, exponent=2.0, seed=None):
    """
    Return sample sequence of length n from a power law distribution.
    """
    # paretovariate(a) has a power-law tail with exponent a + 1, so shift
    # the requested exponent down by one.
    return [seed.paretovariate(exponent - 1) for _ in range(n)]
|
||||
|
||||
|
||||
@py_random_state(2)
def zipf_rv(alpha, xmin=1, seed=None):
    r"""Returns a random value chosen from the Zipf distribution.

    The return value is an integer drawn from the probability distribution

    .. math::

        p(x)=\frac{x^{-\alpha}}{\zeta(\alpha, x_{\min})},

    where $\zeta(\alpha, x_{\min})$ is the Hurwitz zeta function.

    Parameters
    ----------
    alpha : float
        Exponent value of the distribution
    xmin : int
        Minimum value
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    x : int
        Random value from Zipf distribution

    Raises
    ------
    ValueError:
        If xmin < 1 or
        If alpha <= 1

    Notes
    -----
    The rejection algorithm generates random values for a the power-law
    distribution in uniformly bounded expected time dependent on
    parameters. See [1]_ for details on its operation.

    Examples
    --------
    >>> nx.utils.zipf_rv(alpha=2, xmin=3, seed=42)
    8

    References
    ----------
    .. [1] Luc Devroye, Non-Uniform Random Variate Generation,
       Springer-Verlag, New York, 1986.
    """
    if xmin < 1:
        raise ValueError("xmin < 1")
    if alpha <= 1:
        # The original message said "a <= 1.0", which does not match the
        # parameter name; report the actual parameter instead.
        raise ValueError("alpha <= 1.0")
    a1 = alpha - 1.0
    b = 2 ** a1
    # Devroye's rejection method: propose from the continuous power-law
    # tail and accept with probability proportional to the discrete mass.
    while True:
        u = 1.0 - seed.random()  # u in (0,1]
        v = seed.random()  # v in [0,1)
        x = int(xmin * u ** -(1.0 / a1))
        t = (1.0 + (1.0 / x)) ** a1
        if v * x * (t - 1.0) / (b - 1.0) <= t / b:
            break
    return x
|
||||
|
||||
|
||||
def cumulative_distribution(distribution):
    """Returns normalized cumulative distribution from discrete distribution."""
    # Running sum of the normalized weights, starting from 0.0 so the
    # returned list has len(distribution) + 1 entries ending at 1.0.
    total = float(sum(distribution))
    cdf = [0.0]
    for weight in distribution:
        cdf.append(cdf[-1] + weight / total)
    return cdf
|
||||
|
||||
|
||||
@py_random_state(3)
def discrete_sequence(n, distribution=None, cdistribution=None, seed=None):
    """
    Return sample sequence of length n from a given discrete distribution
    or discrete cumulative distribution.

    One of the following must be specified.

    distribution = histogram of values, will be normalized

    cdistribution = normalized discrete cumulative distribution

    """
    import bisect

    if cdistribution is not None:
        cdf = cdistribution
    elif distribution is not None:
        cdf = cumulative_distribution(distribution)
    else:
        raise nx.NetworkXError(
            "discrete_sequence: distribution or cdistribution missing"
        )

    # get a uniform random number
    inputseq = [seed.random() for i in range(n)]

    # choose from CDF: bisect_left locates the CDF bin for each uniform
    # draw; subtracting 1 maps the bin back to a value index.
    seq = [bisect.bisect_left(cdf, s) - 1 for s in inputseq]
    return seq
|
||||
|
||||
|
||||
@py_random_state(2)
def random_weighted_sample(mapping, k, seed=None):
    """Returns k items without replacement from a weighted sample.

    The input is a dictionary of items with weights as values.
    """
    if k > len(mapping):
        raise ValueError("sample larger than population")
    # Rejection sampling: draw weighted choices until k distinct items
    # are collected.  NOTE(review): this can loop many times when k is
    # close to len(mapping) and the weights are highly skewed.
    sample = set()
    while len(sample) < k:
        sample.add(weighted_choice(mapping, seed))
    return list(sample)
|
||||
|
||||
|
||||
@py_random_state(1)
def weighted_choice(mapping, seed=None):
    """Returns a single element from a weighted sample.

    The input is a dictionary of items with weights as values.
    """
    # use roulette method: pick a point in [0, total weight) and walk the
    # items until the accumulated weight passes it.
    rnd = seed.random() * sum(mapping.values())
    for k, w in mapping.items():
        rnd -= w
        if rnd < 0:
            return k
    # NOTE(review): falls through (returning None) only if floating-point
    # rounding leaves rnd >= 0 after all weights are subtracted.
|
157
venv/Lib/site-packages/networkx/utils/rcm.py
Normal file
157
venv/Lib/site-packages/networkx/utils/rcm.py
Normal file
|
@ -0,0 +1,157 @@
|
|||
"""
|
||||
Cuthill-McKee ordering of graph nodes to produce sparse matrices
|
||||
"""
|
||||
from collections import deque
|
||||
from operator import itemgetter
|
||||
|
||||
import networkx as nx
|
||||
from ..utils import arbitrary_element
|
||||
|
||||
__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"]
|
||||
|
||||
|
||||
def cuthill_mckee_ordering(G, heuristic=None):
    """Generate an ordering (permutation) of the graph nodes to make
    a sparse matrix.

    Uses the Cuthill-McKee heuristic (based on breadth-first search) [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    heuristic : function, optional
      Function to choose starting node for RCM algorithm.  If None
      a node from a pseudo-peripheral pair is used.  A user-defined function
      can be supplied that takes a graph object and returns a single node.

    Returns
    -------
    nodes : generator
       Generator of nodes in Cuthill-McKee ordering.

    Examples
    --------
    >>> from networkx.utils import cuthill_mckee_ordering
    >>> G = nx.path_graph(4)
    >>> rcm = list(cuthill_mckee_ordering(G))
    >>> A = nx.adjacency_matrix(G, nodelist=rcm)

    Smallest degree node as heuristic function:

    >>> def smallest_degree(G):
    ...     return min(G, key=G.degree)
    >>> rcm = list(cuthill_mckee_ordering(G, heuristic=smallest_degree))

    See Also
    --------
    reverse_cuthill_mckee_ordering

    Notes
    -----
    The optimal solution to the bandwidth reduction is NP-complete [2]_.

    References
    ----------
    .. [1] E. Cuthill and J. McKee.
       Reducing the bandwidth of sparse symmetric matrices,
       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
       http://doi.acm.org/10.1145/800195.805928
    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
       Springer-Verlag New York, Inc., New York, NY, USA.
    """
    # Each connected component is ordered independently and the orderings
    # are concatenated.
    for c in nx.connected_components(G):
        yield from connected_cuthill_mckee_ordering(G.subgraph(c), heuristic)
|
||||
|
||||
|
||||
def reverse_cuthill_mckee_ordering(G, heuristic=None):
    """Generate an ordering (permutation) of the graph nodes to make
    a sparse matrix.

    Uses the reverse Cuthill-McKee heuristic (based on breadth-first search)
    [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    heuristic : function, optional
      Function to choose starting node for RCM algorithm.  If None
      a node from a pseudo-peripheral pair is used.  A user-defined function
      can be supplied that takes a graph object and returns a single node.

    Returns
    -------
    nodes : generator
       Generator of nodes in reverse Cuthill-McKee ordering.

    Examples
    --------
    >>> from networkx.utils import reverse_cuthill_mckee_ordering
    >>> G = nx.path_graph(4)
    >>> rcm = list(reverse_cuthill_mckee_ordering(G))
    >>> A = nx.adjacency_matrix(G, nodelist=rcm)

    Smallest degree node as heuristic function:

    >>> def smallest_degree(G):
    ...     return min(G, key=G.degree)
    >>> rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))

    See Also
    --------
    cuthill_mckee_ordering

    Notes
    -----
    The optimal solution to the bandwidth reduction is NP-complete [2]_.

    References
    ----------
    .. [1] E. Cuthill and J. McKee.
       Reducing the bandwidth of sparse symmetric matrices,
       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
       http://doi.acm.org/10.1145/800195.805928
    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
       Springer-Verlag New York, Inc., New York, NY, USA.
    """
    # The RCM ordering is simply the Cuthill-McKee ordering reversed.
    return reversed(list(cuthill_mckee_ordering(G, heuristic=heuristic)))
|
||||
|
||||
|
||||
def connected_cuthill_mckee_ordering(G, heuristic=None):
    """Yield the Cuthill-McKee ordering of a connected graph's nodes.

    BFS from a starting node, visiting unvisited neighbors in order of
    increasing degree.
    """
    # Choose the starting node: a pseudo-peripheral node by default, or
    # the user-supplied heuristic.
    if heuristic is None:
        start = pseudo_peripheral_node(G)
    else:
        start = heuristic(G)
    visited = {start}
    queue = deque([start])
    while queue:
        parent = queue.popleft()
        yield parent
        # sorted() accepts any iterable; the original wrapped G.degree(...)
        # in a redundant list() call.
        nd = sorted(G.degree(set(G[parent]) - visited), key=itemgetter(1))
        children = [n for n, d in nd]
        visited.update(children)
        queue.extend(children)
|
||||
|
||||
|
||||
def pseudo_peripheral_node(G):
    """Return a node from a "pseudo peripheral pair" to use as a good
    starting node for Cuthill-McKee ordering.
    """
    # Repeatedly jump to a minimum-degree node among those farthest from
    # the current node, until the eccentricity stops increasing.
    u = arbitrary_element(G)
    lp = 0
    v = u
    while True:
        spl = dict(nx.shortest_path_length(G, v))
        # ``ecc`` replaces the original ambiguous single-letter name ``l``
        # (easily confused with 1/I; flagged by E741).
        ecc = max(spl.values())
        if ecc <= lp:
            break
        lp = ecc
        farthest = (n for n, dist in spl.items() if dist == ecc)
        v, _ = min(G.degree(farthest), key=itemgetter(1))
    return v
|
0
venv/Lib/site-packages/networkx/utils/tests/__init__.py
Normal file
0
venv/Lib/site-packages/networkx/utils/tests/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,18 @@
|
|||
import networkx as nx
|
||||
|
||||
|
||||
def test_reversed():
    """The reversed context manager must restore edge direction on exit,
    both on normal exit and when an exception propagates."""
    G = nx.DiGraph()
    G.add_edge("A", "B")

    # no exception
    with nx.utils.reversed(G):
        pass
    assert "B" in G["A"]

    # exception: the original direction must still be restored.  Catch
    # Exception rather than using a bare except, which would also swallow
    # KeyboardInterrupt/SystemExit.
    try:
        with nx.utils.reversed(G):
            raise Exception
    except Exception:
        assert "B" in G["A"]
|
300
venv/Lib/site-packages/networkx/utils/tests/test_decorators.py
Normal file
300
venv/Lib/site-packages/networkx/utils/tests/test_decorators.py
Normal file
|
@ -0,0 +1,300 @@
|
|||
import tempfile
|
||||
import os
|
||||
import pathlib
|
||||
import random
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils.decorators import open_file, not_implemented_for
|
||||
from networkx.utils.decorators import (
|
||||
preserve_random_state,
|
||||
py_random_state,
|
||||
np_random_state,
|
||||
random_state,
|
||||
)
|
||||
from networkx.utils.misc import PythonRandomInterface
|
||||
|
||||
|
||||
def test_not_implemented_decorator():
    # A function marked not-implemented-for "directed" must accept an
    # undirected Graph without raising.
    @not_implemented_for("directed")
    def test1(G):
        pass

    test1(nx.Graph())
|
||||
|
||||
|
||||
def test_not_implemented_decorator_key():
    # An unknown graph-kind label ("foo") must raise KeyError when the
    # decorated function is invoked.
    with pytest.raises(KeyError):

        @not_implemented_for("foo")
        def test1(G):
            pass

        test1(nx.Graph())
|
||||
|
||||
|
||||
def test_not_implemented_decorator_raise():
    # Calling a function marked not-implemented-for "graph" with a plain
    # Graph must raise NetworkXNotImplemented.
    with pytest.raises(nx.NetworkXNotImplemented):

        @not_implemented_for("graph")
        def test1(G):
            pass

        test1(nx.Graph())
|
||||
|
||||
|
||||
class TestOpenFileDecorator:
    """Exercises @open_file with paths (str / pathlib) and file objects,
    positional and keyword argument positions, and default-None paths.

    Caller-supplied file objects must NOT be closed by the decorator;
    paths opened by the decorator are closed automatically.
    """

    def setup_method(self):
        # A temp file (delete=False so it survives close) shared by tests.
        self.text = ["Blah... ", "BLAH ", "BLAH!!!!"]
        self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False)
        self.name = self.fobj.name

    def teardown_method(self):
        self.fobj.close()
        os.unlink(self.name)

    def write(self, path):
        # Helper: write the fixture text to an already-open file object.
        for text in self.text:
            path.write(text.encode("ascii"))

    @open_file(1, "r")
    def read(self, path):
        return path.readlines()[0]

    @staticmethod
    @open_file(0, "wb")
    def writer_arg0(path):
        # Path is the first (and only) positional argument.
        path.write(b"demo")

    @open_file(1, "wb+")
    def writer_arg1(self, path):
        # Path is positional argument 1 (after self).
        self.write(path)

    @open_file(2, "wb")
    def writer_arg2default(self, x, path=None):
        # Path is positional argument 2 with a None default.
        if path is None:
            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

    @open_file(4, "wb")
    def writer_arg4default(self, x, y, other="hello", path=None, **kwargs):
        # Path is positional argument 4, mixed with defaults and kwargs.
        if path is None:
            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

    @open_file("path", "wb")
    def writer_kwarg(self, **kwargs):
        # Path is passed by keyword name only.
        path = kwargs.get("path", None)
        if path is None:
            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

    def test_writer_arg0_str(self):
        self.writer_arg0(self.name)

    def test_writer_arg0_fobj(self):
        self.writer_arg0(self.fobj)

    def test_writer_arg0_pathlib(self):
        self.writer_arg0(pathlib.Path(self.name))

    def test_writer_arg1_str(self):
        self.writer_arg1(self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg1_fobj(self):
        self.writer_arg1(self.fobj)
        # Caller-supplied file objects must remain open after the call.
        assert not self.fobj.closed
        self.fobj.close()
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_str(self):
        self.writer_arg2default(0, path=None)
        self.writer_arg2default(0, path=self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_fobj(self):
        self.writer_arg2default(0, path=self.fobj)
        assert not self.fobj.closed
        self.fobj.close()
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_fobj_path_none(self):
        self.writer_arg2default(0, path=None)

    def test_writer_arg4default_fobj(self):
        self.writer_arg4default(0, 1, dog="dog", other="other")
        self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_str(self):
        self.writer_kwarg(path=self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_fobj(self):
        self.writer_kwarg(path=self.fobj)
        self.fobj.close()
        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_path_none(self):
        self.writer_kwarg(path=None)
|
||||
|
||||
|
||||
@preserve_random_state
def test_preserve_random_state():
    # The decorator must seed and restore numpy's global RNG so the draw
    # below is reproducible; skip silently when numpy is unavailable.
    try:
        import numpy.random

        r = numpy.random.random()
    except ImportError:
        return
    assert abs(r - 0.61879477158568) < 1e-16
|
||||
|
||||
|
||||
class TestRandomState:
    """Exercise the ``random_state`` / ``np_random_state`` / ``py_random_state``
    decorators with every accepted seed type: ``None``, a random module, an
    int, an ``np.random.RandomState`` instance, and a ``random.Random``
    instance.
    """

    @classmethod
    def setup_class(cls):
        global np
        np = pytest.importorskip("numpy")

    @random_state(1)
    def instantiate_random_state(self, random_state):
        """Return one draw from the RandomState built by the decorator."""
        assert isinstance(random_state, np.random.RandomState)
        return random_state.random_sample()

    @np_random_state(1)
    def instantiate_np_random_state(self, random_state):
        """Return one draw from the RandomState built by the decorator."""
        assert isinstance(random_state, np.random.RandomState)
        return random_state.random_sample()

    @py_random_state(1)
    def instantiate_py_random_state(self, random_state):
        """Return one draw from the Python-style RNG built by the decorator."""
        assert isinstance(random_state, (random.Random, PythonRandomInterface))
        return random_state.random()

    def test_random_state_None(self):
        # seed=None must fall back to the global numpy / random streams.
        np.random.seed(42)
        rv = np.random.random_sample()
        np.random.seed(42)
        assert rv == self.instantiate_random_state(None)
        np.random.seed(42)
        assert rv == self.instantiate_np_random_state(None)

        random.seed(42)
        rv = random.random()
        random.seed(42)
        assert rv == self.instantiate_py_random_state(None)

    def test_random_state_np_random(self):
        # Passing the np.random module must use the global numpy stream.
        np.random.seed(42)
        rv = np.random.random_sample()
        np.random.seed(42)
        assert rv == self.instantiate_random_state(np.random)
        np.random.seed(42)
        assert rv == self.instantiate_np_random_state(np.random)
        np.random.seed(42)
        assert rv == self.instantiate_py_random_state(np.random)

    def test_random_state_int(self):
        np.random.seed(42)
        np_rv = np.random.random_sample()
        random.seed(42)
        py_rv = random.random()

        np.random.seed(42)
        seed = 1
        rval = self.instantiate_random_state(seed)
        rval_expected = np.random.RandomState(seed).rand()
        # BUG FIX: ``assert rval, rval_expected`` only asserted truthiness
        # (the second operand was the failure message); compare for real.
        assert rval == rval_expected

        rval = self.instantiate_np_random_state(seed)
        rval_expected = np.random.RandomState(seed).rand()
        assert rval == rval_expected
        # test that global seed wasn't changed in function
        assert np_rv == np.random.random_sample()

        random.seed(42)
        rval = self.instantiate_py_random_state(seed)
        rval_expected = random.Random(seed).random()
        assert rval == rval_expected
        # test that global seed wasn't changed in function
        assert py_rv == random.random()

    def test_random_state_np_random_RandomState(self):
        np.random.seed(42)
        np_rv = np.random.random_sample()

        np.random.seed(42)
        seed = 1
        rng = np.random.RandomState(seed)
        rval = self.instantiate_random_state(rng)
        rval_expected = np.random.RandomState(seed).rand()
        assert rval == rval_expected

        rval = self.instantiate_np_random_state(seed)
        rval_expected = np.random.RandomState(seed).rand()
        assert rval == rval_expected

        rval = self.instantiate_py_random_state(seed)
        # BUG FIX: an int seed gives py_random_state a ``random.Random``
        # stream, so the expected value must come from ``random.Random``,
        # not from numpy's RandomState (the old vacuous assert hid this).
        rval_expected = random.Random(seed).random()
        assert rval == rval_expected
        # test that global seed wasn't changed in function
        assert np_rv == np.random.random_sample()

    def test_random_state_py_random(self):
        seed = 1
        rng = random.Random(seed)
        rv = self.instantiate_py_random_state(rng)
        # BUG FIX: compare with ``==`` instead of the vacuous
        # ``assert rv, <message>`` form.
        assert rv == random.Random(seed).random()

        # numpy-flavored decorators must reject a pure-Python RNG.
        pytest.raises(ValueError, self.instantiate_random_state, rng)
        pytest.raises(ValueError, self.instantiate_np_random_state, rng)
||||
|
||||
def test_random_state_string_arg_index():
    """``random_state`` must reject a string argument index."""
    with pytest.raises(nx.NetworkXError):

        @random_state("a")
        def make_random_state(rs):
            pass

        make_random_state(1)
|
||||
|
||||
|
||||
def test_py_random_state_string_arg_index():
    """``py_random_state`` must reject a string argument index."""
    with pytest.raises(nx.NetworkXError):

        @py_random_state("a")
        def make_random_state(rs):
            pass

        make_random_state(1)
|
||||
|
||||
|
||||
def test_random_state_invalid_arg_index():
    """``random_state`` must reject an out-of-range argument index."""
    with pytest.raises(nx.NetworkXError):

        @random_state(2)
        def make_random_state(rs):
            pass

        make_random_state(1)
|
||||
|
||||
|
||||
def test_py_random_state_invalid_arg_index():
    """``py_random_state`` must reject an out-of-range argument index."""
    with pytest.raises(nx.NetworkXError):

        @py_random_state(2)
        def make_random_state(rs):
            pass

        make_random_state(1)
|
130
venv/Lib/site-packages/networkx/utils/tests/test_heaps.py
Normal file
130
venv/Lib/site-packages/networkx/utils/tests/test_heaps.py
Normal file
|
@ -0,0 +1,130 @@
|
|||
import pytest
|
||||
import networkx as nx
|
||||
from networkx.utils import BinaryHeap, PairingHeap
|
||||
|
||||
|
||||
class X:
    """A key type whose instances refuse every comparison.

    Heaps tested with such keys prove they never compare keys directly.
    NOTE(review): ``__eq__``/``__ne__`` raise a bool, which itself aborts
    with ``TypeError: exceptions must derive from BaseException`` — every
    comparison still fails loudly, which is all the tests need.
    """

    def __eq__(self, other):
        raise self is other

    def __ne__(self, other):
        raise self is not other

    def __lt__(self, other):
        raise TypeError("cannot compare")

    def __le__(self, other):
        raise TypeError("cannot compare")

    def __ge__(self, other):
        raise TypeError("cannot compare")

    def __gt__(self, other):
        raise TypeError("cannot compare")

    def __hash__(self):
        # Hash on identity so instances remain usable as dict/heap keys.
        return hash(id(self))
|
||||
|
||||
|
||||
x = X()
|
||||
|
||||
|
||||
data = [ # min should not invent an element.
|
||||
("min", nx.NetworkXError),
|
||||
# Popping an empty heap should fail.
|
||||
("pop", nx.NetworkXError),
|
||||
# Getting nonexisting elements should return None.
|
||||
("get", 0, None),
|
||||
("get", x, None),
|
||||
("get", None, None),
|
||||
# Inserting a new key should succeed.
|
||||
("insert", x, 1, True),
|
||||
("get", x, 1),
|
||||
("min", (x, 1)),
|
||||
# min should not pop the top element.
|
||||
("min", (x, 1)),
|
||||
# Inserting a new key of different type should succeed.
|
||||
("insert", 1, -2.0, True),
|
||||
# int and float values should interop.
|
||||
("min", (1, -2.0)),
|
||||
# pop removes minimum-valued element.
|
||||
("insert", 3, -(10 ** 100), True),
|
||||
("insert", 4, 5, True),
|
||||
("pop", (3, -(10 ** 100))),
|
||||
("pop", (1, -2.0)),
|
||||
# Decrease-insert should succeed.
|
||||
("insert", 4, -50, True),
|
||||
("insert", 4, -60, False, True),
|
||||
# Decrease-insert should not create duplicate keys.
|
||||
("pop", (4, -60)),
|
||||
("pop", (x, 1)),
|
||||
# Popping all elements should empty the heap.
|
||||
("min", nx.NetworkXError),
|
||||
("pop", nx.NetworkXError),
|
||||
# Non-value-changing insert should fail.
|
||||
("insert", x, 0, True),
|
||||
("insert", x, 0, False, False),
|
||||
("min", (x, 0)),
|
||||
("insert", x, 0, True, False),
|
||||
("min", (x, 0)),
|
||||
# Failed insert should not create duplicate keys.
|
||||
("pop", (x, 0)),
|
||||
("pop", nx.NetworkXError),
|
||||
# Increase-insert should succeed when allowed.
|
||||
("insert", None, 0, True),
|
||||
("insert", 2, -1, True),
|
||||
("min", (2, -1)),
|
||||
("insert", 2, 1, True, False),
|
||||
("min", (None, 0)),
|
||||
# Increase-insert should fail when disallowed.
|
||||
("insert", None, 2, False, False),
|
||||
("min", (None, 0)),
|
||||
# Failed increase-insert should not create duplicate keys.
|
||||
("pop", (None, 0)),
|
||||
("pop", (2, 1)),
|
||||
("min", nx.NetworkXError),
|
||||
("pop", nx.NetworkXError),
|
||||
]
|
||||
|
||||
|
||||
def _test_heap_class(cls, *args, **kwargs):
    """Run the scripted ``data`` scenario plus a bulk workout against ``cls``."""
    heap = cls(*args, **kwargs)

    # Scripted scenario: each entry is (method, *call_args, expected); an
    # expected value of nx.NetworkXError means the call must raise it.
    for method, *rest in data:
        expected = rest[-1]
        call_args = rest[:-1]
        bound = getattr(heap, method)
        if expected is nx.NetworkXError:
            pytest.raises(expected, bound, *call_args)
        else:
            assert bound(*call_args) == expected

    # Bulk workout: inserts, ordered pops, refused re-inserts, and
    # decrease/increase updates.
    for key in range(99, -1, -1):
        assert heap.insert(key, key)
    for key in range(50):
        assert heap.pop() == (key, key)
    for key in range(100):
        assert heap.insert(key, key) == (key < 50)
    for key in range(100):
        assert not heap.insert(key, key + 1)
    for key in range(50):
        assert heap.pop() == (key, key)
    for key in range(100):
        assert heap.insert(key, key + 1) == (key < 50)
    for key in range(49):
        assert heap.pop() == (key, key + 1)
    assert sorted([heap.pop(), heap.pop()]) == [(49, 50), (50, 50)]
    for key in range(51, 100):
        assert not heap.insert(key, key + 1, True)
    for key in range(51, 70):
        assert heap.pop() == (key, key + 1)
    for key in range(100):
        assert heap.insert(key, key)
    for key in range(100):
        assert heap.pop() == (key, key)
    pytest.raises(nx.NetworkXError, heap.pop)
|
||||
|
||||
|
||||
def test_PairingHeap():
    """PairingHeap must pass the shared heap scenario."""
    _test_heap_class(PairingHeap)
|
||||
|
||||
|
||||
def test_BinaryHeap():
    """BinaryHeap must pass the shared heap scenario."""
    _test_heap_class(BinaryHeap)
|
157
venv/Lib/site-packages/networkx/utils/tests/test_mapped_queue.py
Normal file
157
venv/Lib/site-packages/networkx/utils/tests/test_mapped_queue.py
Normal file
|
@ -0,0 +1,157 @@
|
|||
from networkx.utils.mapped_queue import MappedQueue
|
||||
|
||||
|
||||
class TestMappedQueue:
    """Unit tests for MappedQueue's heap invariant and its position map."""

    def setup(self):
        pass

    def _check_map(self, q):
        # The position map must mirror the heap list exactly.
        assert q.d == {elt: pos for pos, elt in enumerate(q.h)}

    def _make_mapped_queue(self, h):
        """Build a MappedQueue around ``h`` without re-heapifying it."""
        q = MappedQueue()
        q.h = h
        q.d = {elt: pos for pos, elt in enumerate(h)}
        return q

    def test_heapify(self):
        q = self._make_mapped_queue([5, 4, 3, 2, 1, 0])
        q._heapify()
        self._check_map(q)

    def test_init(self):
        q = MappedQueue([5, 4, 3, 2, 1, 0])
        self._check_map(q)

    def test_len(self):
        q = MappedQueue([5, 4, 3, 2, 1, 0])
        self._check_map(q)
        assert len(q) == 6

    def test_siftup_leaf(self):
        q = self._make_mapped_queue([2])
        q._siftup(0)
        assert q.h == [2]
        self._check_map(q)

    def test_siftup_one_child(self):
        q = self._make_mapped_queue([2, 0])
        q._siftup(0)
        assert q.h == [0, 2]
        self._check_map(q)

    def test_siftup_left_child(self):
        q = self._make_mapped_queue([2, 0, 1])
        q._siftup(0)
        assert q.h == [0, 2, 1]
        self._check_map(q)

    def test_siftup_right_child(self):
        q = self._make_mapped_queue([2, 1, 0])
        q._siftup(0)
        assert q.h == [0, 1, 2]
        self._check_map(q)

    def test_siftup_multiple(self):
        q = self._make_mapped_queue([0, 1, 2, 4, 3, 5, 6])
        q._siftup(0)
        assert q.h == [1, 3, 2, 4, 0, 5, 6]
        self._check_map(q)

    def test_siftdown_leaf(self):
        q = self._make_mapped_queue([2])
        q._siftdown(0)
        assert q.h == [2]
        self._check_map(q)

    def test_siftdown_single(self):
        start = [1, 0]
        q = self._make_mapped_queue(start)
        q._siftdown(len(start) - 1)
        assert q.h == [0, 1]
        self._check_map(q)

    def test_siftdown_multiple(self):
        start = [1, 2, 3, 4, 5, 6, 7, 0]
        q = self._make_mapped_queue(start)
        q._siftdown(len(start) - 1)
        assert q.h == [0, 1, 3, 2, 5, 6, 7, 4]
        self._check_map(q)

    def test_push(self):
        q = MappedQueue()
        for elt in [6, 1, 4, 3, 2, 5, 0]:
            q.push(elt)
        assert q.h == [0, 2, 1, 6, 3, 5, 4]
        self._check_map(q)

    def test_push_duplicate(self):
        q = MappedQueue()
        for elt in [2, 1, 0]:
            assert q.push(elt)
        assert q.h == [0, 2, 1]
        self._check_map(q)
        # Pushing an element already present must be a refused no-op.
        assert not q.push(1)

    def test_pop(self):
        h = [3, 4, 6, 0, 1, 2, 5]
        q = self._make_mapped_queue(list(h))
        q._heapify()
        # Popping everything must return the elements in sorted order.
        assert [q.pop() for _ in h] == sorted(h)
        self._check_map(q)

    def test_remove_leaf(self):
        q = self._make_mapped_queue([0, 2, 1, 6, 3, 5, 4])
        q.remove(3)
        assert q.h == [0, 2, 1, 6, 4, 5]

    def test_remove_root(self):
        q = self._make_mapped_queue([0, 2, 1, 6, 3, 5, 4])
        q.remove(0)
        assert q.h == [1, 2, 4, 6, 3, 5]

    def test_update_leaf(self):
        q = self._make_mapped_queue([0, 20, 10, 60, 30, 50, 40])
        q.update(30, 15)
        assert q.h == [0, 15, 10, 60, 20, 50, 40]

    def test_update_root(self):
        q = self._make_mapped_queue([0, 20, 10, 60, 30, 50, 40])
        q.update(0, 35)
        assert q.h == [10, 20, 35, 60, 30, 50, 40]
|
222
venv/Lib/site-packages/networkx/utils/tests/test_misc.py
Normal file
222
venv/Lib/site-packages/networkx/utils/tests/test_misc.py
Normal file
|
@ -0,0 +1,222 @@
|
|||
import pytest
|
||||
import networkx as nx
|
||||
import random
|
||||
from networkx.utils import (
|
||||
create_py_random_state,
|
||||
create_random_state,
|
||||
discrete_sequence,
|
||||
dict_to_numpy_array,
|
||||
dict_to_numpy_array1,
|
||||
dict_to_numpy_array2,
|
||||
is_string_like,
|
||||
iterable,
|
||||
groups,
|
||||
make_list_of_ints,
|
||||
make_str,
|
||||
pairwise,
|
||||
powerlaw_sequence,
|
||||
PythonRandomInterface,
|
||||
to_tuple,
|
||||
)
|
||||
|
||||
|
||||
def test_is_string_like():
    """Only actual strings count as string-like."""
    assert is_string_like("aaaa")
    for non_string in (None, 123):
        assert not is_string_like(non_string)
|
||||
|
||||
|
||||
def test_iterable():
    """Containers and strings are iterable; None and plain ints are not."""
    for obj in (None, 10):
        assert not iterable(obj)
    for obj in ([1, 2, 3], (1, 2, 3), {1: "A", 2: "X"}, "ABC"):
        assert iterable(obj)
|
||||
|
||||
|
||||
def test_graph_iterable():
    """Graphs and their node/edge views must count as iterable."""
    complete = nx.complete_graph(10)
    assert iterable(complete)
    assert iterable(complete.nodes())
    assert iterable(complete.edges())
|
||||
|
||||
|
||||
def test_make_list_of_ints():
    """Whole-number floats are coerced to int in place; other values raise."""
    mixed = [1, 2, 3.0, 42, -2]
    # The very same list object is returned (conversion happens in place).
    assert make_list_of_ints(mixed) is mixed
    assert make_list_of_ints(mixed) == mixed
    assert type(make_list_of_ints(mixed)[2]) is int
    pytest.raises(nx.NetworkXError, make_list_of_ints, [1, 2, 3, "kermit"])
    pytest.raises(nx.NetworkXError, make_list_of_ints, [1, 2, 3.1])
|
||||
|
||||
|
||||
def test_random_number_distribution():
    # smoke test only: both sequence generators must run without raising
    powerlaw_sequence(20, exponent=2.5)
    discrete_sequence(20, distribution=[0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3])
|
||||
|
||||
|
||||
def test_make_str_with_bytes():
    """make_str on a non-ASCII str yields a str of the same length.

    NOTE(review): despite the name, the input here is a str, not bytes —
    this duplicates test_make_str_with_unicode.
    """
    result = make_str("qualité")
    assert isinstance(result, str)
    assert len(result) == 7
|
||||
|
||||
|
||||
def test_make_str_with_unicode():
    """make_str must pass a non-ASCII str through unchanged in type and length."""
    result = make_str("qualité")
    assert isinstance(result, str)
    assert len(result) == 7
|
||||
|
||||
|
||||
class TestNumpyArray:
    """Tests for the dict→numpy-array helpers and numpy int coercion."""

    @classmethod
    def setup_class(cls):
        global numpy
        global assert_allclose
        numpy = pytest.importorskip("numpy")
        assert_allclose = numpy.testing.assert_allclose

    def test_numpy_to_list_of_ints(self):
        exact_ints = numpy.array([1, 2, 3], dtype=numpy.int64)
        whole_floats = numpy.array([1.0, 2, 3])
        fractional = numpy.array([1.1, 2, 3])
        assert type(make_list_of_ints(exact_ints)) == list
        assert make_list_of_ints(whole_floats) == list(whole_floats)
        converted = make_list_of_ints(whole_floats)
        assert type(converted[0]) == int
        # Non-integral floats must be rejected.
        pytest.raises(nx.NetworkXError, make_list_of_ints, fractional)

    def test_dict_to_numpy_array1(self):
        d = {"a": 1, "b": 2}
        assert_allclose(
            dict_to_numpy_array1(d, mapping={"a": 0, "b": 1}), numpy.array([1, 2])
        )
        assert_allclose(
            dict_to_numpy_array1(d, mapping={"b": 0, "a": 1}), numpy.array([2, 1])
        )
        # Without a mapping the key order is unspecified; check the sum only.
        assert_allclose(dict_to_numpy_array1(d).sum(), 3)

    def test_dict_to_numpy_array2(self):
        d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}}
        mapping = {"a": 1, "b": 0}
        assert_allclose(
            dict_to_numpy_array2(d, mapping=mapping), numpy.array([[20, 10], [2, 1]])
        )
        assert_allclose(dict_to_numpy_array2(d).sum(), 33)

    def test_dict_to_numpy_array_a(self):
        # The generic helper must handle a dict-of-dicts like the 2D helper.
        d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}}
        assert_allclose(
            dict_to_numpy_array(d, mapping={"a": 0, "b": 1}),
            numpy.array([[1, 2], [10, 20]]),
        )
        assert_allclose(
            dict_to_numpy_array(d, mapping={"a": 1, "b": 0}),
            numpy.array([[20, 10], [2, 1]]),
        )
        assert_allclose(dict_to_numpy_array2(d).sum(), 33)

    def test_dict_to_numpy_array_b(self):
        # The generic helper must handle a flat dict like the 1D helper.
        d = {"a": 1, "b": 2}
        assert_allclose(
            dict_to_numpy_array(d, mapping={"a": 0, "b": 1}), numpy.array([1, 2])
        )
        assert_allclose(dict_to_numpy_array1(d).sum(), 3)
|
||||
|
||||
|
||||
def test_pairwise():
    """pairwise yields consecutive pairs; cyclic=True appends (last, first)."""
    nodes = range(4)
    expected = [(0, 1), (1, 2), (2, 3)]
    assert list(pairwise(nodes)) == expected
    assert list(pairwise(iter(nodes))) == expected
    assert list(pairwise(nodes, cyclic=True)) == expected + [(3, 0)]
    # An empty iterable yields nothing in either mode.
    assert list(pairwise(iter(()))) == []
    assert list(pairwise(iter(()), cyclic=True)) == []
|
||||
|
||||
|
||||
def test_groups():
    """groups inverts a many-to-one mapping into value -> set of keys."""
    forward = dict(zip("abcde", [0, 0, 1, 1, 2]))
    assert groups(forward) == {0: {"a", "b"}, 1: {"c", "d"}, 2: {"e"}}
    assert groups({}) == {}
|
||||
|
||||
|
||||
def test_to_tuple():
    """to_tuple recursively converts nested lists into nested tuples."""
    assert to_tuple([1, 2, [1, 3]]) == (1, 2, (1, 3))
    # Already-tuple input passes through unchanged.
    flat_pair = (1, 2)
    assert to_tuple(flat_pair) == flat_pair
    # Mixed nesting: inner lists are converted even inside a tuple.
    assert to_tuple((1, 2, [1, 3])) == (1, 2, (1, 3))
|
||||
|
||||
|
||||
def test_create_random_state():
    """Every accepted seed type must yield a numpy RandomState."""
    np = pytest.importorskip("numpy")
    rs = np.random.RandomState

    for seed in (1, None, np.random, rs(1)):
        assert isinstance(create_random_state(seed), rs)
    pytest.raises(ValueError, create_random_state, "a")

    # An int seed must reproduce RandomState(seed)'s stream exactly.
    assert np.all(rs(1).rand(10) == create_random_state(1).rand(10))
|
||||
|
||||
|
||||
def test_create_py_random_state():
    """Seed handling for the Python-flavored RNG factory."""
    pyrs = random.Random

    # Plain Python seeds yield random.Random instances.
    assert isinstance(create_py_random_state(1), pyrs)
    assert isinstance(create_py_random_state(None), pyrs)
    assert isinstance(create_py_random_state(pyrs(1)), pyrs)
    pytest.raises(ValueError, create_py_random_state, "a")

    np = pytest.importorskip("numpy")

    # numpy generators get wrapped in the PythonRandomInterface adapter.
    rs = np.random.RandomState
    nprs = PythonRandomInterface
    assert isinstance(create_py_random_state(np.random), nprs)
    assert isinstance(create_py_random_state(rs(1)), nprs)
    # test default rng input
    assert isinstance(PythonRandomInterface(), nprs)
|
||||
|
||||
|
||||
def test_PythonRandomInterface():
    """The adapter's methods must delegate to the equivalent numpy calls."""
    np = pytest.importorskip("numpy")
    rs = np.random.RandomState
    rng = PythonRandomInterface(rs(42))
    reference = rs(42)

    # Both objects consume identically-seeded streams, so the paired calls
    # below must match draw for draw.
    assert rng.randrange(3, 5) == reference.randint(3, 5)
    assert np.all(rng.choice([1, 2, 3]) == reference.choice([1, 2, 3]))
    assert rng.gauss(0, 1) == reference.normal(0, 1)
    assert rng.expovariate(1.5) == reference.exponential(1 / 1.5)
    assert np.all(rng.shuffle([1, 2, 3]) == reference.shuffle([1, 2, 3]))
    assert np.all(
        rng.sample([1, 2, 3], 2) == reference.choice([1, 2, 3], (2,), replace=False)
    )
    # randrange-style upper bound vs numpy's inclusive randint argument.
    assert rng.randint(3, 5) == reference.randint(3, 6)
    assert rng.random() == reference.random_sample()
|
|
@ -0,0 +1,37 @@
|
|||
import pytest
|
||||
from networkx.utils import (
|
||||
powerlaw_sequence,
|
||||
zipf_rv,
|
||||
random_weighted_sample,
|
||||
weighted_choice,
|
||||
)
|
||||
|
||||
|
||||
def test_degree_sequences():
    """powerlaw_sequence runs with and without a seed and has the right length."""
    sequence = powerlaw_sequence(10, seed=1)
    sequence = powerlaw_sequence(10)
    assert len(sequence) == 10
|
||||
|
||||
|
||||
def test_zipf_rv():
    """zipf_rv returns an int sample and validates its parameters."""
    r = zipf_rv(2.3, xmin=2, seed=1)
    r = zipf_rv(2.3, 2, 1)
    r = zipf_rv(2.3)
    # BUG FIX: ``assert type(r), int`` always passed (a class is truthy and
    # ``int`` was just the failure message); check the type for real.
    assert isinstance(r, int)
    # alpha must exceed 1 and xmin must be positive.
    pytest.raises(ValueError, zipf_rv, 0.5)
    pytest.raises(ValueError, zipf_rv, 2, xmin=0)
|
||||
|
||||
|
||||
def test_random_weighted_sample():
    """Sampling as many items as exist must return every key; more must raise."""
    weights = {"a": 10, "b": 20}
    sample = random_weighted_sample(weights, 2, seed=1)
    sample = random_weighted_sample(weights, 2)
    assert sorted(sample) == sorted(weights.keys())
    pytest.raises(ValueError, random_weighted_sample, weights, 3)
|
||||
|
||||
|
||||
def test_random_weighted_choice():
    """A zero-weight key can never be chosen."""
    weights = {"a": 10, "b": 0}
    chosen = weighted_choice(weights, seed=1)
    chosen = weighted_choice(weights)
    assert chosen == "a"
|
63
venv/Lib/site-packages/networkx/utils/tests/test_rcm.py
Normal file
63
venv/Lib/site-packages/networkx/utils/tests/test_rcm.py
Normal file
|
@ -0,0 +1,63 @@
|
|||
from networkx.utils import reverse_cuthill_mckee_ordering
|
||||
import networkx as nx
|
||||
|
||||
|
||||
def test_reverse_cuthill_mckee():
    # Example graph from the Boost cuthill_mckee_ordering example:
    # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
    edges = [
        (0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
        (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7),
    ]
    rcm = list(reverse_cuthill_mckee_ordering(nx.Graph(edges)))
    # BFS tie-breaking makes two orderings acceptable.
    assert rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9], [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]]
|
||||
|
||||
|
||||
def test_rcm_alternate_heuristic():
    # example from
    edges = [
        (0, 0), (0, 4), (1, 1), (1, 2), (1, 5), (1, 7), (2, 2), (2, 4),
        (3, 3), (3, 6), (4, 4), (5, 5), (5, 7), (6, 6), (7, 7),
    ]
    G = nx.Graph(edges)

    valid_orderings = [
        [6, 3, 5, 7, 1, 2, 4, 0],
        [6, 3, 7, 5, 1, 2, 4, 0],
        [7, 5, 1, 2, 4, 0, 6, 3],
    ]

    def smallest_degree(G):
        # Pick the minimum-degree node, breaking ties by node label.
        deg, node = min((d, n) for n, d in G.degree())
        return node

    ordering = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
    assert ordering in valid_orderings
|
|
@ -0,0 +1,42 @@
|
|||
import networkx as nx
|
||||
|
||||
|
||||
def test_unionfind():
    # Fixed by: 2cddd5958689bdecdcd89b91ac9aaf6ce0e4f6b8
    # Under Python 3, ordering comparisons between mixed types raise
    # "TypeError: unorderable types: str() > int()", which used to break
    # UnionFind.union.  Merging an int with a str must not raise.
    uf = nx.utils.UnionFind()
    uf.union(0, "a")
|
||||
|
||||
|
||||
def test_subtree_union():
    # See https://github.com/networkx/networkx/pull/3224
    # (35db1b551ee65780794a357794f521d8768d5049).
    # to_sets() must report a single merged set even when a union joins
    # two existing subtrees.
    uf = nx.utils.UnionFind()
    for left, right in [(1, 2), (3, 4), (4, 5), (1, 5)]:
        uf.union(left, right)
    assert list(uf.to_sets()) == [{1, 2, 3, 4, 5}]
|
||||
|
||||
|
||||
def test_unionfind_weights():
    # After merging everything, the root's weight must equal the total
    # number of elements.
    uf = nx.utils.UnionFind()
    uf.union(1, 4, 7)
    uf.union(2, 5, 8)
    uf.union(3, 6, 9)
    uf.union(1, 2, 3, 4, 5, 6, 7, 8, 9)
    assert uf.weights[uf[1]] == 9
|
||||
|
||||
|
||||
def test_empty_union():
    # union() with no arguments must leave the structure untouched.
    uf = nx.utils.UnionFind((0, 1))
    uf.union()
    assert uf[0] == 0
    assert uf[1] == 1
|
103
venv/Lib/site-packages/networkx/utils/union_find.py
Normal file
103
venv/Lib/site-packages/networkx/utils/union_find.py
Normal file
|
@ -0,0 +1,103 @@
|
|||
"""
|
||||
Union-find data structure.
|
||||
"""
|
||||
|
||||
from networkx.utils import groups
|
||||
|
||||
|
||||
class UnionFind:
    """Union-find data structure.

    Each unionFind instance X maintains a family of disjoint sets of
    hashable objects, supporting the following two methods:

    - X[item] returns a name for the set containing the given item.
      Each set is named by an arbitrarily-chosen one of its members; as
      long as the set remains unchanged it will keep the same name. If
      the item is not yet part of a set in X, a new singleton set is
      created for it.

    - X.union(item1, item2, ...) merges the sets containing each item
      into a single larger set. If any item is not yet part of a set
      in X, it is added to X as one of the members of the merged set.

    Union-find data structure. Based on Josiah Carlson's code,
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912
    with significant additional changes by D. Eppstein.
    http://www.ics.uci.edu/~eppstein/PADS/UnionFind.py

    """

    def __init__(self, elements=None):
        """Create a new empty union-find structure.

        If *elements* is an iterable, this structure will be initialized
        with the discrete partition on the given set of elements.

        """
        if elements is None:
            elements = ()
        # parents maps each element to its parent in the forest (roots map
        # to themselves); weights maps each root to its set's size.
        self.parents = {}
        self.weights = {}
        for x in elements:
            self.weights[x] = 1
            self.parents[x] = x

    def __getitem__(self, object):
        """Find and return the name (root) of the set containing the object."""

        # check for previously unknown object
        if object not in self.parents:
            self.parents[object] = object
            self.weights[object] = 1
            return object

        # find path of objects leading to the root
        path = [object]
        root = self.parents[object]
        while root != path[-1]:
            path.append(root)
            root = self.parents[root]

        # compress the path and return
        for ancestor in path:
            self.parents[ancestor] = root
        return root

    def __iter__(self):
        """Iterate through all items ever found or unioned by this structure."""
        return iter(self.parents)

    def to_sets(self):
        """Iterates over the sets stored in this structure.

        For example::

            >>> partition = UnionFind("xyz")
            >>> sorted(map(sorted, partition.to_sets()))
            [['x'], ['y'], ['z']]
            >>> partition.union("x", "y")
            >>> sorted(map(sorted, partition.to_sets()))
            [['x', 'y'], ['z']]

        """
        # Ensure fully pruned paths
        for x in self.parents.keys():
            _ = self[x]  # Evaluated for side-effect only

        yield from groups(self.parents).values()

    def union(self, *objects):
        """Find the sets containing the objects and merge them all."""
        # Find the heaviest root according to its weight.
        # BUG FIX: the sort previously ran ascending, so the *lightest* root
        # was chosen as the merged root — contradicting this comment and
        # weakening the union-by-size balancing that keeps find paths short.
        # Sort descending so the heaviest root wins.
        roots = iter(
            sorted(
                {self[x] for x in objects}, key=lambda r: self.weights[r], reverse=True
            )
        )
        try:
            root = next(roots)
        except StopIteration:
            # union() with no arguments is a no-op.
            return

        for r in roots:
            self.weights[root] += self.weights[r]
            self.parents[r] = root
|
Loading…
Add table
Add a link
Reference in a new issue