From 9cc8b422a512e7e7819238d597fd6815ec9c1c8e Mon Sep 17 00:00:00 2001 From: peijenburg Date: Sat, 9 Dec 2023 23:49:55 +0100 Subject: [PATCH 001/129] Add Kirchhoff index / Effective graph resistance (#6926) * Add Kirchhoff index Add the computation of the Kirchhoff index to networkx * minor fixes * scipy not necessary * scipy not necessary * style fixes * small doc change * change digraph, add test * vectorise final computation * style fix * minor cleanup tests * Add Kirchhoff index Add the computation of the Kirchhoff index to networkx * minor fixes * scipy not necessary * scipy not necessary * style fixes * small doc change * change digraph, add test * vectorise final computation * style fix * minor cleanup tests * fix * change name * remove return var --- .../algorithms/distance_measures.rst | 1 + networkx/algorithms/distance_measures.py | 93 ++++++++++++++++++- .../tests/test_distance_measures.py | 85 +++++++++++++++++ 3 files changed, 177 insertions(+), 2 deletions(-) diff --git a/doc/reference/algorithms/distance_measures.rst b/doc/reference/algorithms/distance_measures.rst index cd54a4afa49..172e5b6ddb8 100644 --- a/doc/reference/algorithms/distance_measures.rst +++ b/doc/reference/algorithms/distance_measures.rst @@ -10,6 +10,7 @@ Distance Measures center diameter eccentricity + effective_graph_resistance kemeny_constant periphery radius diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 09bde0cc157..04f88b17b6a 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -12,6 +12,7 @@ "barycenter", "resistance_distance", "kemeny_constant", + "effective_graph_resistance", ] @@ -634,7 +635,7 @@ def barycenter(G, weight=None, attr=None, sp=None): @not_implemented_for("directed") @nx._dispatch(edge_attrs="weight") def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True): - """Returns the resistance distance between every pair of nodes on 
graph G. + """Returns the resistance distance between pairs of nodes in graph G. The resistance distance between two nodes of a graph is akin to treating the graph as a grid of resistors with a resistance equal to the provided @@ -763,6 +764,94 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr return d +@not_implemented_for("directed") +@nx._dispatch(edge_attrs="weight") +def effective_graph_resistance(G, weight=None, invert_weight=True): + """Returns the Effective graph resistance of G. + + Also known as the Kirchhoff index. + + The effective graph resistance is defined as the sum + of the resistance distance of every node pair in G [1]_. + + If weight is not provided, then a weight of 1 is used for all edges. + + The effective graph resistance of a disconnected graph is infinite. + + Parameters + ---------- + G : NetworkX graph + A graph + + weight : string or None, optional (default=None) + The edge data key used to compute the effective graph resistance. + If None, then each edge has weight 1. + + invert_weight : boolean (default=True) + Proper calculation of resistance distance requires building the + Laplacian matrix with the reciprocal of the weight. Not required + if the weight is already inverted. Weight cannot be zero. + + Returns + ------- + RG : float + The effective graph resistance of `G`. + + Raises + ------- + NetworkXNotImplemented + If `G` is a directed graph. + + NetworkXError + If `G` does not contain any nodes. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> round(nx.effective_graph_resistance(G), 10) + 10.25 + + Notes + ----- + The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored. + Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights. + + References + ---------- + .. [1] Wolfram + "Kirchhoff Index." + https://mathworld.wolfram.com/KirchhoffIndex.html + .. [2] W. Ellens, F. M. Spieksma, P. 
Van Mieghem, A. Jamakovic, R. E. Kooij. + Effective graph resistance. + Lin. Alg. Appl. 435:2491-2506, 2011. + """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + + # Disconnected graphs have infinite Effective graph resistance + if not nx.is_connected(G): + return np.inf + + # Invert weights + G = G.copy() + if invert_weight and weight is not None: + if G.is_multigraph(): + for u, v, k, d in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for u, v, d in G.edges(data=True): + d[weight] = 1 / d[weight] + + # Get Laplacian eigenvalues + mu = np.sort(nx.laplacian_spectrum(G, weight=weight)) + + # Compute Effective graph resistance based on spectrum of the Laplacian + # Self-loops are ignored + return np.sum(1 / mu[1:]) * G.number_of_nodes() + + @nx.utils.not_implemented_for("directed") @nx._dispatch(edge_attrs="weight") def kemeny_constant(G, *, weight=None): @@ -793,7 +882,7 @@ def kemeny_constant(G, *, weight=None): Returns ------- - K : float + float The Kemeny constant of the graph `G`. 
Raises diff --git a/networkx/algorithms/tests/test_distance_measures.py b/networkx/algorithms/tests/test_distance_measures.py index d4b5cf2f119..5cfe2cc9cae 100644 --- a/networkx/algorithms/tests/test_distance_measures.py +++ b/networkx/algorithms/tests/test_distance_measures.py @@ -423,6 +423,91 @@ def test_resistance_distance_all(self): assert round(rd[1][3], 5) == 1 +class TestEffectiveGraphResistance: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def setup_method(self): + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=4) + self.G = G + + def test_effective_graph_resistance_directed_graph(self): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXNotImplemented): + nx.effective_graph_resistance(G) + + def test_effective_graph_resistance_empty(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.effective_graph_resistance(G) + + def test_effective_graph_resistance_not_connected(self): + G = nx.Graph([(1, 2), (3, 4)]) + RG = nx.effective_graph_resistance(G) + assert np.isinf(RG) + + def test_effective_graph_resistance(self): + RG = nx.effective_graph_resistance(self.G, "weight", True) + rd12 = 1 / (1 / (1 + 4) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / 4) + rd23 = 1 / (1 / (2 + 4) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_noinv(self): + RG = nx.effective_graph_resistance(self.G, "weight", False) + rd12 = 1 / (1 / (1 / 1 + 1 / 4) + 1 / (1 / 2)) + rd13 = 1 / (1 / (1 / 1 + 1 / 2) + 1 / (1 / 4)) + rd23 = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1)) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_no_weight(self): + RG = nx.effective_graph_resistance(self.G) + assert np.isclose(RG, 2) + + def test_effective_graph_resistance_neg_weight(self): + self.G[2][3]["weight"] = -4 + RG = nx.effective_graph_resistance(self.G, "weight", True) + rd12 = 1 / (1 / (1 + -4) + 1 / 2) + rd13 = 1 / (1 
/ (1 + 2) + 1 / (-4)) + rd23 = 1 / (1 / (2 + -4) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=1) + G.add_edge(2, 3, weight=3) + RG = nx.effective_graph_resistance(G, "weight", True) + edge23 = 1 / (1 / 1 + 1 / 3) + rd12 = 1 / (1 / (1 + edge23) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / edge23) + rd23 = 1 / (1 / (2 + edge23) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_div0(self): + with pytest.raises(ZeroDivisionError): + self.G[1][2]["weight"] = 0 + nx.effective_graph_resistance(self.G, "weight") + + def test_effective_graph_resistance_complete_graph(self): + N = 10 + G = nx.complete_graph(N) + RG = nx.effective_graph_resistance(G) + assert np.isclose(RG, N - 1) + + def test_effective_graph_resistance_path_graph(self): + N = 10 + G = nx.path_graph(N) + RG = nx.effective_graph_resistance(G) + assert np.isclose(RG, (N - 1) * N * (N + 1) // 6) + + class TestBarycenter: """Test :func:`networkx.algorithms.distance_measures.barycenter`.""" From d85f3a49d99fc439a1d01f8cb1b1bfb1062d4874 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Tue, 12 Dec 2023 23:17:35 +0400 Subject: [PATCH 002/129] DOC: fix URL econded links and doc references (#7152) --- doc/reference/algorithms/dag.rst | 1 + networkx/algorithms/cycles.py | 2 +- networkx/algorithms/dag.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/reference/algorithms/dag.rst b/doc/reference/algorithms/dag.rst index f1cadf8afef..8fb911788f3 100644 --- a/doc/reference/algorithms/dag.rst +++ b/doc/reference/algorithms/dag.rst @@ -21,3 +21,4 @@ Directed Acyclic Graphs dag_longest_path dag_longest_path_length dag_to_branching + compute_v_structures diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py index 9149e9eb10d..5221ca15893 100644 --- 
a/networkx/algorithms/cycles.py +++ b/networkx/algorithms/cycles.py @@ -1202,7 +1202,7 @@ def girth(G): References ---------- - .. [1] https://en.wikipedia.org/wiki/Girth_(graph_theory) + .. [1] `Wikipedia: Girth `_ """ girth = depth_limit = inf diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index fb74df81c6d..58138c340c4 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -1249,7 +1249,7 @@ def compute_v_structures(G): Notes ----- - https://en.wikipedia.org/wiki/Collider_(statistics) + `Wikipedia: Collider in causal graphs `_ """ for collider, preds in G.pred.items(): for common_parents in combinations(preds, r=2): From dd6d68e27bd8cc626c17e7e8e3231a972c171460 Mon Sep 17 00:00:00 2001 From: Navya Agarwal <82928853+navyagarwal@users.noreply.github.com> Date: Wed, 13 Dec 2023 18:34:26 +0530 Subject: [PATCH 003/129] Changed return types of shortest path methods to improve consistency (#6584) * Fixed return types and modified tests * Resolved check failures in centrality\reaching.py * Fixed doc_str examples * Minor edit --- networkx/algorithms/centrality/reaching.py | 2 +- networkx/algorithms/shortest_paths/generic.py | 8 ++++---- .../algorithms/shortest_paths/tests/test_generic.py | 12 ++++++------ .../shortest_paths/tests/test_unweighted.py | 4 ++-- networkx/algorithms/shortest_paths/unweighted.py | 4 ++-- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py index 7b9eac564ac..e93c8e6d8cd 100644 --- a/networkx/algorithms/centrality/reaching.py +++ b/networkx/algorithms/centrality/reaching.py @@ -112,7 +112,7 @@ def as_distance(u, v, d): # TODO This can be trivially parallelized. 
lrc = [ centrality(G, node, paths=paths, weight=weight, normalized=normalized) - for node, paths in shortest_paths.items() + for node, paths in shortest_paths ] max_lrc = max(lrc) diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index e47c4b4f5a1..0d1bb5795f9 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -111,7 +111,7 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): >>> p = nx.shortest_path(G, target=4) # source not specified >>> p[1] # shortest path from source=1 to target=4 [1, 2, 3, 4] - >>> p = nx.shortest_path(G) # source, target not specified + >>> p = dict(nx.shortest_path(G)) # source, target not specified >>> p[2][4] # shortest path from source=2 to target=4 [2, 3, 4] @@ -140,11 +140,11 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): # Find paths between all pairs. if method == "unweighted": - paths = dict(nx.all_pairs_shortest_path(G)) + paths = nx.all_pairs_shortest_path(G) elif method == "dijkstra": - paths = dict(nx.all_pairs_dijkstra_path(G, weight=weight)) + paths = nx.all_pairs_dijkstra_path(G, weight=weight) else: # method == 'bellman-ford': - paths = dict(nx.all_pairs_bellman_ford_path(G, weight=weight)) + paths = nx.all_pairs_bellman_ford_path(G, weight=weight) else: # Find paths from all nodes co-accessible to the target. 
if G.is_directed(): diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py index 863f1dcb947..9fcc8c396d5 100644 --- a/networkx/algorithms/shortest_paths/tests/test_generic.py +++ b/networkx/algorithms/shortest_paths/tests/test_generic.py @@ -212,22 +212,22 @@ def test_single_source_all_shortest_paths(self): assert sorted(ans[4]) == [[4]] def test_all_pairs_shortest_path(self): - p = nx.shortest_path(self.cycle) + p = dict(nx.shortest_path(self.cycle)) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_shortest_path(self.cycle)) - p = nx.shortest_path(self.grid) + p = dict(nx.shortest_path(self.grid)) validate_grid_path(4, 4, 1, 12, p[1][12]) # now with weights - p = nx.shortest_path(self.cycle, weight="weight") + p = dict(nx.shortest_path(self.cycle, weight="weight")) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) - p = nx.shortest_path(self.grid, weight="weight") + p = dict(nx.shortest_path(self.grid, weight="weight")) validate_grid_path(4, 4, 1, 12, p[1][12]) # weights and method specified - p = nx.shortest_path(self.cycle, weight="weight", method="dijkstra") + p = dict(nx.shortest_path(self.cycle, weight="weight", method="dijkstra")) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) - p = nx.shortest_path(self.cycle, weight="weight", method="bellman-ford") + p = dict(nx.shortest_path(self.cycle, weight="weight", method="bellman-ford")) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_bellman_ford_path(self.cycle)) diff --git a/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/networkx/algorithms/shortest_paths/tests/test_unweighted.py index e2d999518a5..ec0b3f757ed 100644 --- a/networkx/algorithms/shortest_paths/tests/test_unweighted.py +++ b/networkx/algorithms/shortest_paths/tests/test_unweighted.py @@ -92,9 +92,9 @@ def test_single_target_shortest_path(self): def 
test_single_target_shortest_path_length(self): pl = nx.single_target_shortest_path_length lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert dict(pl(self.cycle, 0)) == lengths + assert pl(self.cycle, 0) == lengths lengths = {0: 0, 1: 6, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} - assert dict(pl(self.directed_cycle, 0)) == lengths + assert pl(self.directed_cycle, 0) == lengths # test missing targets target = 8 with pytest.raises(nx.NodeNotFound, match=f"Target {target} is not in G"): diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py index 1503ba74784..78df41bc389 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -117,7 +117,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): Examples -------- >>> G = nx.path_graph(5, create_using=nx.DiGraph()) - >>> length = dict(nx.single_target_shortest_path_length(G, 4)) + >>> length = nx.single_target_shortest_path_length(G, 4) >>> length[0] 4 >>> for node in range(5): @@ -145,7 +145,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): nextlevel = [target] # for version 3.3 we will return a dict like this: # return dict(_single_shortest_path_length(adj, nextlevel, cutoff)) - return _single_shortest_path_length(adj, nextlevel, cutoff) + return dict(_single_shortest_path_length(adj, nextlevel, cutoff)) @nx._dispatch From 7abde0036e55539dc3e92d152728fb155c1b0139 Mon Sep 17 00:00:00 2001 From: Robert <44594743+ImHereForTheCookies@users.noreply.github.com> Date: Wed, 13 Dec 2023 08:56:26 -0800 Subject: [PATCH 004/129] adding test coverage for isomorphism when using digraphs (#6417) Co-authored-by: Mridul Seth Co-authored-by: Ross Barnowski --- networkx/algorithms/isomorphism/tests/test_isomorphism.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphism.py 
b/networkx/algorithms/isomorphism/tests/test_isomorphism.py index c669040390d..548af808ffd 100644 --- a/networkx/algorithms/isomorphism/tests/test_isomorphism.py +++ b/networkx/algorithms/isomorphism/tests/test_isomorphism.py @@ -1,3 +1,5 @@ +import pytest + import networkx as nx from networkx.algorithms import isomorphism as iso @@ -38,3 +40,9 @@ def test_faster_could_be_isomorphic(self): def test_is_isomorphic(self): assert iso.is_isomorphic(self.G1, self.G2) assert not iso.is_isomorphic(self.G1, self.G4) + assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed()) + assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed()) + with pytest.raises( + nx.NetworkXError, match="Graphs G1 and G2 are not of the same type." + ): + iso.is_isomorphic(self.G1.to_directed(), self.G1) From 1dade0d07ec0818bcdddd76f43fcca7d74a8649e Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Wed, 13 Dec 2023 11:54:59 -0600 Subject: [PATCH 005/129] Remove usage of `__networkx_plugin__` (use `__networkx_backend__` instead) (#7157) Backends should have had enough time to update by now. 
--- networkx/utils/backends.py | 45 +++++++------------------------------- 1 file changed, 8 insertions(+), 37 deletions(-) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 4ecd1b13bd3..c2d5661e034 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -463,46 +463,29 @@ def __call__(self, /, *args, backend=None, **kwargs): args[self.graphs[gname]] = val has_backends = any( - hasattr(g, "__networkx_backend__") or hasattr(g, "__networkx_plugin__") + hasattr(g, "__networkx_backend__") if gname not in self.list_graphs - else any( - hasattr(g2, "__networkx_backend__") - or hasattr(g2, "__networkx_plugin__") - for g2 in g - ) + else any(hasattr(g2, "__networkx_backend__") for g2 in g) for gname, g in graphs_resolved.items() ) if has_backends: graph_backend_names = { - getattr( - g, - "__networkx_backend__", - getattr(g, "__networkx_plugin__", "networkx"), - ) + getattr(g, "__networkx_backend__", "networkx") for gname, g in graphs_resolved.items() if gname not in self.list_graphs } for gname in self.list_graphs & graphs_resolved.keys(): graph_backend_names.update( - getattr( - g, - "__networkx_backend__", - getattr(g, "__networkx_plugin__", "networkx"), - ) + getattr(g, "__networkx_backend__", "networkx") for g in graphs_resolved[gname] ) else: has_backends = any( - hasattr(g, "__networkx_backend__") or hasattr(g, "__networkx_plugin__") - for g in graphs_resolved.values() + hasattr(g, "__networkx_backend__") for g in graphs_resolved.values() ) if has_backends: graph_backend_names = { - getattr( - g, - "__networkx_backend__", - getattr(g, "__networkx_plugin__", "networkx"), - ) + getattr(g, "__networkx_backend__", "networkx") for g in graphs_resolved.values() } if has_backends: @@ -740,12 +723,7 @@ def _convert_arguments(self, backend_name, args, kwargs): name=self.name, graph_name=gname, ) - if getattr( - g, - "__networkx_backend__", - getattr(g, "__networkx_plugin__", "networkx"), - ) - == "networkx" + if getattr(g, 
"__networkx_backend__", "networkx") == "networkx" else g for g in bound.arguments[gname] ] @@ -773,14 +751,7 @@ def _convert_arguments(self, backend_name, args, kwargs): preserve_graph = gname in preserve_graph_attrs else: preserve_graph = preserve_graph_attrs - if ( - getattr( - graph, - "__networkx_backend__", - getattr(graph, "__networkx_plugin__", "networkx"), - ) - == "networkx" - ): + if getattr(graph, "__networkx_backend__", "networkx") == "networkx": bound.arguments[gname] = backend.convert_from_nx( graph, edge_attrs=edges, From 1c2fa4b9372df1a1428444d89fe6a72359e1a2cb Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Fri, 15 Dec 2023 08:28:28 +0400 Subject: [PATCH 006/129] DOC: consistent spelling of neighbor and rename vars (#7162) * DOC: consistent spelling of neighbor and rename vars * rename more vars --- .../algorithms/approximation/treewidth.py | 8 ++--- .../algorithms/centrality/voterank_alg.py | 2 +- networkx/algorithms/cluster.py | 10 +++--- .../algorithms/coloring/greedy_coloring.py | 8 ++--- .../coloring/tests/test_coloring.py | 6 ++-- networkx/algorithms/community/asyn_fluid.py | 4 +-- .../algorithms/community/label_propagation.py | 2 +- networkx/algorithms/community/louvain.py | 4 +-- networkx/algorithms/core.py | 2 +- networkx/algorithms/dominating.py | 6 ++-- networkx/algorithms/graph_hashing.py | 6 ++-- networkx/algorithms/isomorphism/ismags.py | 8 ++--- networkx/algorithms/isomorphism/vf2pp.py | 8 ++--- networkx/algorithms/matching.py | 10 +++--- networkx/algorithms/planar_drawing.py | 14 ++++---- networkx/algorithms/similarity.py | 14 ++++---- networkx/algorithms/sparsifiers.py | 18 +++++----- networkx/algorithms/summarization.py | 36 +++++++++---------- networkx/algorithms/tests/test_mis.py | 4 +-- networkx/classes/graph.py | 4 +-- networkx/generators/duplication.py | 4 +-- networkx/generators/line.py | 6 ++-- networkx/generators/random_graphs.py | 8 ++--- 23 files changed, 95 insertions(+), 97 deletions(-) diff --git 
a/networkx/algorithms/approximation/treewidth.py b/networkx/algorithms/approximation/treewidth.py index ce673b6eda4..89ac39ce67d 100644 --- a/networkx/algorithms/approximation/treewidth.py +++ b/networkx/algorithms/approximation/treewidth.py @@ -70,7 +70,7 @@ def treewidth_min_fill_in(G): """Returns a treewidth decomposition using the Minimum Fill-in heuristic. The heuristic chooses a node from the graph, where the number of edges - added turning the neighbourhood of the chosen node into clique is as + added turning the neighborhood of the chosen node into clique is as small as possible. Parameters @@ -89,7 +89,7 @@ class MinDegreeHeuristic: """Implements the Minimum Degree heuristic. The heuristic chooses the nodes according to their degree - (number of neighbours), i.e., first the node with the lowest degree is + (number of neighbors), i.e., first the node with the lowest degree is chosen, then the graph is updated and the corresponding node is removed. Next, a new node with the lowest degree is chosen, and so on. """ @@ -136,7 +136,7 @@ def min_fill_in_heuristic(graph): """Implements the Minimum Degree heuristic. Returns the node from the graph, where the number of edges added when - turning the neighbourhood of the chosen node into clique is as small as + turning the neighborhood of the chosen node into clique is as small as possible. This algorithm chooses the nodes using the Minimum Fill-In heuristic. 
The running time of the algorithm is :math:`O(V^3)` and it uses additional constant memory.""" @@ -201,7 +201,7 @@ def treewidth_decomp(G, heuristic=min_fill_in_heuristic): # get first node from heuristic elim_node = heuristic(graph) while elim_node is not None: - # connect all neighbours with each other + # connect all neighbors with each other nbrs = graph[elim_node] for u, v in itertools.permutations(nbrs, 2): if v not in graph[u]: diff --git a/networkx/algorithms/centrality/voterank_alg.py b/networkx/algorithms/centrality/voterank_alg.py index f9cf43c7813..ca6bd913479 100644 --- a/networkx/algorithms/centrality/voterank_alg.py +++ b/networkx/algorithms/centrality/voterank_alg.py @@ -9,7 +9,7 @@ def voterank(G, number_of_nodes=None): """Select a list of influential nodes in a graph using VoteRank algorithm VoteRank [1]_ computes a ranking of the nodes in a graph G based on a - voting scheme. With VoteRank, all nodes vote for each of its in-neighbours + voting scheme. With VoteRank, all nodes vote for each of its in-neighbors and the node with the highest votes is elected iteratively. The voting ability of out-neighbors of elected nodes is decreased in subsequent turns. 
diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 0500852a7ce..4297d5c2862 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -67,20 +67,18 @@ def triangles(G, nodes=None): # dict used to avoid visiting the same nodes twice # this allows calculating/counting each triangle only once - later_neighbors = {} + later_nbrs = {} # iterate over the nodes in a graph for node, neighbors in G.adjacency(): - later_neighbors[node] = { - n for n in neighbors if n not in later_neighbors and n != node - } + later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node} # instantiate Counter for each node to include isolated nodes # add 1 to the count if a nodes neighbor's neighbor is also a neighbor triangle_counts = Counter(dict.fromkeys(G, 0)) - for node1, neighbors in later_neighbors.items(): + for node1, neighbors in later_nbrs.items(): for node2 in neighbors: - third_nodes = neighbors & later_neighbors[node2] + third_nodes = neighbors & later_nbrs[node2] m = len(third_nodes) triangle_counts[node1] += m triangle_counts[node2] += m diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py index bbb3ba03cef..efc70f30eb8 100644 --- a/networkx/algorithms/coloring/greedy_coloring.py +++ b/networkx/algorithms/coloring/greedy_coloring.py @@ -274,7 +274,7 @@ def greedy_color(G, strategy="largest_first", interchange=False): """Color a graph using various strategies of greedy graph coloring. Attempts to color a graph using as few colors as possible, where no - neighbours of a node can have same color as the node itself. The + neighbors of a node can have same color as the node itself. The given strategy determines the order in which nodes are colored. 
The strategies are described in [1]_, and smallest-last is based on @@ -371,11 +371,11 @@ def greedy_color(G, strategy="largest_first", interchange=False): if interchange: return _greedy_coloring_with_interchange(G, nodes) for u in nodes: - # Set to keep track of colors of neighbours - neighbour_colors = {colors[v] for v in G[u] if v in colors} + # Set to keep track of colors of neighbors + nbr_colors = {colors[v] for v in G[u] if v in colors} # Find the first unused color. for color in itertools.count(): - if color not in neighbour_colors: + if color not in nbr_colors: break # Assign the new color to the current node. colors[u] = color diff --git a/networkx/algorithms/coloring/tests/test_coloring.py b/networkx/algorithms/coloring/tests/test_coloring.py index a2a4e39589e..dd2842d548a 100644 --- a/networkx/algorithms/coloring/tests/test_coloring.py +++ b/networkx/algorithms/coloring/tests/test_coloring.py @@ -446,13 +446,13 @@ def color_remaining_nodes( ) for u in node_iterator: - # Set to keep track of colors of neighbours - neighbour_colors = { + # Set to keep track of colors of neighbors + nbr_colors = { aux_colored_nodes[v] for v in G[u] if v in aux_colored_nodes } # Find the first unused color. for color in itertools.count(): - if color not in neighbour_colors: + if color not in nbr_colors: break aux_colored_nodes[u] = color color_assignments.append((u, color)) diff --git a/networkx/algorithms/community/asyn_fluid.py b/networkx/algorithms/community/asyn_fluid.py index 1a0029ae7ff..4c2223e6331 100644 --- a/networkx/algorithms/community/asyn_fluid.py +++ b/networkx/algorithms/community/asyn_fluid.py @@ -24,7 +24,7 @@ def asyn_fluidc(G, k, max_iter=100, seed=None): The algorithm proceeds as follows. First each of the initial k communities is initialized in a random vertex in the graph. Then the algorithm iterates over all vertices in a random order, updating the community of each vertex - based on its own community and the communities of its neighbours. 
This + based on its own community and the communities of its neighbors. This process is performed several times until convergence. At all times, each community has a total density of 1, which is equally distributed among the vertices it contains. If a vertex changes of @@ -102,7 +102,7 @@ def asyn_fluidc(G, k, max_iter=100, seed=None): com_counter.update({communities[vertex]: density[communities[vertex]]}) except KeyError: pass - # Gather neighbour vertex communities + # Gather neighbor vertex communities for v in G[vertex]: try: com_counter.update({communities[v]: density[communities[v]]}) diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py index c10938d627c..5d799d3de5c 100644 --- a/networkx/algorithms/community/label_propagation.py +++ b/networkx/algorithms/community/label_propagation.py @@ -316,7 +316,7 @@ def _most_frequent_labels(node, labeling, G): # accordingly, hence the immediate if statement. return {labeling[node]} - # Compute the frequencies of all neighbours of node + # Compute the frequencies of all neighbors of node freqs = Counter(labeling[q] for q in G[node]) max_freq = max(freqs.values()) return {label for label, freq in freqs.items() if freq == max_freq} diff --git a/networkx/algorithms/community/louvain.py b/networkx/algorithms/community/louvain.py index 772f4d79d69..6aa783d3152 100644 --- a/networkx/algorithms/community/louvain.py +++ b/networkx/algorithms/community/louvain.py @@ -240,7 +240,7 @@ def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None): out_degrees = dict(G.out_degree(weight="weight")) Stot_in = list(in_degrees.values()) Stot_out = list(out_degrees.values()) - # Calculate weights for both in and out neighbours without considering self-loops + # Calculate weights for both in and out neighbors without considering self-loops nbrs = {} for u in G: nbrs[u] = defaultdict(float) @@ -327,7 +327,7 @@ def _neighbor_weights(nbrs, node2com): Parameters 
---------- nbrs : dictionary - Dictionary with nodes' neighbours as keys and their edge weight as value. + Dictionary with nodes' neighbors as keys and their edge weight as value. node2com : dictionary Dictionary with all graph's nodes as keys and their community index as value. diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 2a766ead754..87835c299d5 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -394,7 +394,7 @@ def k_corona(G, k, core_number=None): """Returns the k-corona of G. The k-corona is the subgraph of nodes in the k-core which have - exactly k neighbours in the k-core. + exactly k neighbors in the k-core. .. deprecated:: 3.3 `k_corona` will not accept `MultiGraph` objects in version 3.5. diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py index 97408ab4380..80276bb5a1d 100644 --- a/networkx/algorithms/dominating.py +++ b/networkx/algorithms/dominating.py @@ -55,13 +55,13 @@ def dominating_set(G, start_with=None): while remaining_nodes: # Choose an arbitrary node and determine its undominated neighbors. v = remaining_nodes.pop() - undominated_neighbors = set(G[v]) - dominating_set + undominated_nbrs = set(G[v]) - dominating_set # Add the node to the dominating set and the neighbors to the # dominated set. Finally, remove all of those nodes from the set # of remaining nodes. dominating_set.add(v) - dominated_nodes |= undominated_neighbors - remaining_nodes -= undominated_neighbors + dominated_nodes |= undominated_nbrs + remaining_nodes -= undominated_nbrs return dominating_set diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py index d85a44a3604..0b69b08480c 100644 --- a/networkx/algorithms/graph_hashing.py +++ b/networkx/algorithms/graph_hashing.py @@ -43,7 +43,7 @@ def weisfeiler_lehman_graph_hash( ): """Return Weisfeiler Lehman (WL) graph hash. - The function iteratively aggregates and hashes neighbourhoods of each node. 
+ The function iteratively aggregates and hashes neighborhoods of each node. After each node's neighbors are hashed to obtain updated node labels, a hashed histogram of resulting labels is returned as the final hash. @@ -176,7 +176,7 @@ def weisfeiler_lehman_subgraph_hashes( additionally a hash of the initial node label (or equivalently a subgraph of depth 0) - The function iteratively aggregates and hashes neighbourhoods of each node. + The function iteratively aggregates and hashes neighborhoods of each node. This is achieved for each step by replacing for each node its label from the previous iteration with its hashed 1-hop neighborhood aggregate. The new node label is then appended to a list of node labels for each @@ -254,7 +254,7 @@ def weisfeiler_lehman_subgraph_hashes( The first 2 WL subgraph hashes match. From this we can conclude that it's very likely the neighborhood of 4 hops around these nodes are isomorphic: each - iteration aggregates 1-hop neighbourhoods meaning hashes at depth $n$ are influenced + iteration aggregates 1-hop neighborhoods meaning hashes at depth $n$ are influenced by every node within $2n$ hops. 
However the neighborhood of 6 hops is no longer isomorphic since their 3rd hash does diff --git a/networkx/algorithms/isomorphism/ismags.py b/networkx/algorithms/isomorphism/ismags.py index 25ce94c87ec..1fe920adfbb 100644 --- a/networkx/algorithms/isomorphism/ismags.py +++ b/networkx/algorithms/isomorphism/ismags.py @@ -848,11 +848,11 @@ def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=No left_to_map = to_be_mapped - set(mapping.keys()) new_candidates = candidates.copy() - sgn_neighbours = set(self.subgraph[sgn]) - not_gn_neighbours = set(self.graph.nodes) - set(self.graph[gn]) + sgn_nbrs = set(self.subgraph[sgn]) + not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn]) for sgn2 in left_to_map: - if sgn2 not in sgn_neighbours: - gn2_options = not_gn_neighbours + if sgn2 not in sgn_nbrs: + gn2_options = not_gn_nbrs else: # Get all edges to gn of the right color: g_edges = self._edges_of_same_color(sgn, sgn2) diff --git a/networkx/algorithms/isomorphism/vf2pp.py b/networkx/algorithms/isomorphism/vf2pp.py index 953204f669b..8cc83eb8a53 100644 --- a/networkx/algorithms/isomorphism/vf2pp.py +++ b/networkx/algorithms/isomorphism/vf2pp.py @@ -476,8 +476,8 @@ def _find_candidates( G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params - covered_neighbors = [nbr for nbr in G1[u] if nbr in mapping] - if not covered_neighbors: + covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping] + if not covered_nbrs: candidates = set(nodes_of_G2Labels[G1_labels[u]]) candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]]) candidates.intersection_update(T2_tilde) @@ -492,10 +492,10 @@ def _find_candidates( ) return candidates - nbr1 = covered_neighbors[0] + nbr1 = covered_nbrs[0] common_nodes = set(G2[mapping[nbr1]]) - for nbr1 in covered_neighbors[1:]: + for nbr1 in covered_nbrs[1:]: common_nodes.intersection_update(G2[mapping[nbr1]]) 
common_nodes.difference_update(reverse_mapping) diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py index b20d7f6970c..f44dc9e67cd 100644 --- a/networkx/algorithms/matching.py +++ b/networkx/algorithms/matching.py @@ -410,7 +410,7 @@ class Blossom: # and w is a vertex in b.childs[wrap(i+1)]. # If b is a top-level S-blossom, - # b.mybestedges is a list of least-slack edges to neighbouring + # b.mybestedges is a list of least-slack edges to neighboring # S-blossoms, or None if no such list has been computed yet. # This is used for efficient computation of delta3. @@ -738,12 +738,12 @@ def _recurse(b, endstage): j += jstep while b.childs[j] != entrychild: # Examine the vertices of the sub-blossom to see whether - # it is reachable from a neighbouring S-vertex outside the + # it is reachable from a neighboring S-vertex outside the # expanding blossom. bv = b.childs[j] if label.get(bv) == 1: # This sub-blossom just got label S through one of its - # neighbours; leave it be. + # neighbors; leave it be. 
j += jstep continue if isinstance(bv, Blossom): @@ -972,11 +972,11 @@ def verifyOptimum(): v = queue.pop() assert label[inblossom[v]] == 1 - # Scan its neighbours: + # Scan its neighbors: for w in G.neighbors(v): if w == v: continue # ignore self-loops - # w is a neighbour to v + # w is a neighbor to v bv = inblossom[v] bw = inblossom[w] if bv == bw: diff --git a/networkx/algorithms/planar_drawing.py b/networkx/algorithms/planar_drawing.py index 47f94f17215..5c091e26e4d 100644 --- a/networkx/algorithms/planar_drawing.py +++ b/networkx/algorithms/planar_drawing.py @@ -78,18 +78,18 @@ def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): left_t_child[v3] = None for k in range(3, len(node_list)): - vk, contour_neighbors = node_list[k] - wp = contour_neighbors[0] - wp1 = contour_neighbors[1] - wq = contour_neighbors[-1] - wq1 = contour_neighbors[-2] - adds_mult_tri = len(contour_neighbors) > 2 + vk, contour_nbrs = node_list[k] + wp = contour_nbrs[0] + wp1 = contour_nbrs[1] + wq = contour_nbrs[-1] + wq1 = contour_nbrs[-2] + adds_mult_tri = len(contour_nbrs) > 2 # Stretch gaps: delta_x[wp1] += 1 delta_x[wq] += 1 - delta_x_wp_wq = sum(delta_x[x] for x in contour_neighbors[1:]) + delta_x_wp_wq = sum(delta_x[x] for x in contour_nbrs[1:]) # Adjust offsets delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 3d943c20d7e..92b0cf9a3e1 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -1689,22 +1689,22 @@ def generate_random_paths( for _ in range(path_length): # Randomly sample a neighbor (v_j) according # to transition probabilities from ``node`` (v) to its neighbors - neighbor_index = np.random.choice( + nbr_index = np.random.choice( num_nodes, p=transition_probabilities[starting_index] ) # Set current vertex (v = v_j) - starting_index = neighbor_index + starting_index = nbr_index # Add v into p_r - 
neighbor_node = node_map[neighbor_index] - path.append(neighbor_node) + nbr_node = node_map[nbr_index] + path.append(nbr_node) # Add p_r into P_v if index_map is not None: - if neighbor_node in index_map: - index_map[neighbor_node].add(path_index) + if nbr_node in index_map: + index_map[nbr_node].add(path_index) else: - index_map[neighbor_node] = {path_index} + index_map[nbr_node] = {path_index} yield path diff --git a/networkx/algorithms/sparsifiers.py b/networkx/algorithms/sparsifiers.py index a94aee0d09d..5890a5b1e9a 100644 --- a/networkx/algorithms/sparsifiers.py +++ b/networkx/algorithms/sparsifiers.py @@ -136,11 +136,11 @@ def spanner(G, stretch, weight=None, seed=None): # remove edges to centers with edge weight less than # closest_center_weight for neighbor in residual_graph.adj[v]: - neighbor_cluster = clustering[neighbor] - neighbor_weight = lightest_edge_weight[neighbor_cluster] + nbr_cluster = clustering[neighbor] + nbr_weight = lightest_edge_weight[nbr_cluster] if ( - neighbor_cluster == closest_center - or neighbor_weight < closest_center_weight + nbr_cluster == closest_center + or nbr_weight < closest_center_weight ): edges_to_remove.add((v, neighbor)) @@ -257,14 +257,14 @@ def _lightest_edge_dicts(residual_graph, clustering, node): lightest_edge_neighbor = {} lightest_edge_weight = {} for neighbor in residual_graph.adj[node]: - neighbor_center = clustering[neighbor] + nbr_center = clustering[neighbor] weight = residual_graph[node][neighbor]["weight"] if ( - neighbor_center not in lightest_edge_weight - or weight < lightest_edge_weight[neighbor_center] + nbr_center not in lightest_edge_weight + or weight < lightest_edge_weight[nbr_center] ): - lightest_edge_neighbor[neighbor_center] = neighbor - lightest_edge_weight[neighbor_center] = weight + lightest_edge_neighbor[nbr_center] = neighbor + lightest_edge_weight[nbr_center] = weight return lightest_edge_neighbor, lightest_edge_weight diff --git a/networkx/algorithms/summarization.py 
b/networkx/algorithms/summarization.py index 26665e09b1a..236810f0d98 100644 --- a/networkx/algorithms/summarization.py +++ b/networkx/algorithms/summarization.py @@ -180,12 +180,12 @@ def dedensify(G, threshold, prefix=None, copy=True): auxiliary = {} for node in G: - high_degree_neighbors = frozenset(high_degree_nodes & set(G[node])) - if high_degree_neighbors: - if high_degree_neighbors in auxiliary: - auxiliary[high_degree_neighbors].add(node) + high_degree_nbrs = frozenset(high_degree_nodes & set(G[node])) + if high_degree_nbrs: + if high_degree_nbrs in auxiliary: + auxiliary[high_degree_nbrs].add(node) else: - auxiliary[high_degree_neighbors] = {node} + auxiliary[high_degree_nbrs] = {node} if copy: G = G.copy() @@ -326,33 +326,33 @@ def _snap_eligible_group(G, groups, group_lookup, edge_types): ------- tuple: group ID to split, and neighbor-groups participation_counts data structure """ - neighbor_info = {node: {gid: Counter() for gid in groups} for node in group_lookup} + nbr_info = {node: {gid: Counter() for gid in groups} for node in group_lookup} for group_id in groups: current_group = groups[group_id] - # build neighbor_info for nodes in group + # build nbr_info for nodes in group for node in current_group: - neighbor_info[node] = {group_id: Counter() for group_id in groups} + nbr_info[node] = {group_id: Counter() for group_id in groups} edges = G.edges(node, keys=True) if G.is_multigraph() else G.edges(node) for edge in edges: neighbor = edge[1] edge_type = edge_types[edge] neighbor_group_id = group_lookup[neighbor] - neighbor_info[node][neighbor_group_id][edge_type] += 1 + nbr_info[node][neighbor_group_id][edge_type] += 1 # check if group_id is eligible to be split group_size = len(current_group) for other_group_id in groups: edge_counts = Counter() for node in current_group: - edge_counts.update(neighbor_info[node][other_group_id].keys()) + edge_counts.update(nbr_info[node][other_group_id].keys()) if not all(count == group_size for count in 
edge_counts.values()): - # only the neighbor_info of the returned group_id is required for handling group splits - return group_id, neighbor_info + # only the nbr_info of the returned group_id is required for handling group splits + return group_id, nbr_info - # if no eligible groups, complete neighbor_info is calculated - return None, neighbor_info + # if no eligible groups, complete nbr_info is calculated + return None, nbr_info def _snap_split(groups, neighbor_info, group_lookup, group_id): @@ -540,12 +540,12 @@ def snap_aggregation( for node, node_type in group_lookup.items(): groups[node_type].add(node) - eligible_group_id, neighbor_info = _snap_eligible_group( + eligible_group_id, nbr_info = _snap_eligible_group( G, groups, group_lookup, edge_types ) while eligible_group_id: - groups = _snap_split(groups, neighbor_info, group_lookup, eligible_group_id) - eligible_group_id, neighbor_info = _snap_eligible_group( + groups = _snap_split(groups, nbr_info, group_lookup, eligible_group_id) + eligible_group_id, nbr_info = _snap_eligible_group( G, groups, group_lookup, edge_types ) return _snap_build_graph( @@ -553,7 +553,7 @@ def snap_aggregation( groups, node_attributes, edge_attributes, - neighbor_info, + nbr_info, edge_types, prefix, supernode_attribute, diff --git a/networkx/algorithms/tests/test_mis.py b/networkx/algorithms/tests/test_mis.py index 379c5c07c7a..02be02d4c33 100644 --- a/networkx/algorithms/tests/test_mis.py +++ b/networkx/algorithms/tests/test_mis.py @@ -58,5 +58,5 @@ def test_random_graphs(): G = nx.erdos_renyi_graph(i * 10 + 1, random.random()) IS = nx.maximal_independent_set(G) assert G.subgraph(IS).number_of_edges() == 0 - neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) - assert all(v in neighbors_of_MIS for v in set(G.nodes()).difference(IS)) + nbrs_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) + assert all(v in nbrs_of_MIS for v in set(G.nodes()).difference(IS)) diff --git a/networkx/classes/graph.py 
b/networkx/classes/graph.py index 5bbf079a47b..e149ab2eeb3 100644 --- a/networkx/classes/graph.py +++ b/networkx/classes/graph.py @@ -1547,8 +1547,8 @@ def clear_edges(self): >>> list(G.edges) [] """ - for neighbours_dict in self._adj.values(): - neighbours_dict.clear() + for nbr_dict in self._adj.values(): + nbr_dict.clear() def is_multigraph(self): """Returns True if graph is a multigraph, False otherwise.""" diff --git a/networkx/generators/duplication.py b/networkx/generators/duplication.py index 6daa5a9c738..ef7f374bbda 100644 --- a/networkx/generators/duplication.py +++ b/networkx/generators/duplication.py @@ -76,10 +76,10 @@ def partial_duplication_graph(N, n, p, q, seed=None): G.add_node(new_node) # For each neighbor of u... - for neighbor_node in list(nx.all_neighbors(G, src_node)): + for nbr_node in list(nx.all_neighbors(G, src_node)): # Add the neighbor to v with probability p. if seed.random() < p: - G.add_edge(new_node, neighbor_node) + G.add_edge(new_node, nbr_node) # Join v and u with probability q. 
if seed.random() < q: diff --git a/networkx/generators/line.py b/networkx/generators/line.py index 731371d4d55..ccee9605584 100644 --- a/networkx/generators/line.py +++ b/networkx/generators/line.py @@ -355,12 +355,12 @@ def _odd_triangle(G, T): if e[0] not in G[e[1]]: raise nx.NetworkXError(f"Edge ({e[0]}, {e[1]}) not in graph") - T_neighbors = defaultdict(int) + T_nbrs = defaultdict(int) for t in T: for v in G[t]: if v not in T: - T_neighbors[v] += 1 - return any(T_neighbors[v] in [1, 3] for v in T_neighbors) + T_nbrs[v] += 1 + return any(T_nbrs[v] in [1, 3] for v in T_nbrs) def _find_partition(G, starting_cell): diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py index 75487ef6a21..e08c4ce7d1f 100644 --- a/networkx/generators/random_graphs.py +++ b/networkx/generators/random_graphs.py @@ -911,16 +911,16 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): node = seed.choice(eligible_nodes) # The available nodes do have a neighbor at least. 
- neighbor_nodes = list(G[node]) + nbr_nodes = list(G[node]) # Choosing the other end that will get detached - src_node = seed.choice(neighbor_nodes) + src_node = seed.choice(nbr_nodes) # Picking a target node that is not 'node' or # neighbor with 'node', with preferential attachment - neighbor_nodes.append(node) + nbr_nodes.append(node) dest_node = seed.choice( - [nd for nd in attachment_preference if nd not in neighbor_nodes] + [nd for nd in attachment_preference if nd not in nbr_nodes] ) # Rewire G.remove_edge(node, src_node) From c0f5918a2e34679242f26c9c116fee3456e5974a Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Fri, 15 Dec 2023 23:12:53 +0400 Subject: [PATCH 007/129] DOC: add reference to fast_label_propagation_communities (#7167) --- doc/reference/algorithms/community.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/reference/algorithms/community.rst b/doc/reference/algorithms/community.rst index 55f7ad61ab3..1d107099790 100644 --- a/doc/reference/algorithms/community.rst +++ b/doc/reference/algorithms/community.rst @@ -47,6 +47,7 @@ Label propagation asyn_lpa_communities label_propagation_communities + fast_label_propagation_communities Louvain Community Detection --------------------------- From 6ad78426badf78347e59758d0ef324d51794d5e0 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Sat, 16 Dec 2023 05:08:54 +0400 Subject: [PATCH 008/129] MAINT: use ruff format instead of black (#7160) * use ruff format instead of black * Rm pylint comment guards. 
--------- Co-authored-by: Ross Barnowski --- .pre-commit-config.yaml | 9 +++------ networkx/algorithms/assortativity/correlation.py | 4 ++-- networkx/algorithms/isomorphism/ismags.py | 10 ++-------- .../isomorphism/tests/test_vf2pp_helpers.py | 15 +++++++++------ networkx/algorithms/shortest_paths/generic.py | 5 +++-- networkx/algorithms/tests/test_bridges.py | 2 +- networkx/utils/tests/test_misc.py | 3 ++- 7 files changed, 22 insertions(+), 26 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d27674aced6..719aedfbbb0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,26 +2,23 @@ # pre-commit install repos: - - repo: https://github.com/psf/black - rev: 23.9.1 - hooks: - - id: black - repo: https://github.com/adamchainz/blacken-docs rev: 1.16.0 hooks: - id: blacken-docs - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.3 + rev: v3.1.0 hooks: - id: prettier files: \.(html|md|toml|yml|yaml) args: [--prose-wrap=preserve] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.291 + rev: v0.1.8 hooks: - id: ruff args: - --fix + - id: ruff-format - repo: local hooks: - id: generate_requirements.py diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py index 35ea78d6d52..d517132ff21 100644 --- a/networkx/algorithms/assortativity/correlation.py +++ b/networkx/algorithms/assortativity/correlation.py @@ -94,7 +94,7 @@ def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None else: degrees = {d for _, d in G.degree(nodes, weight=weight)} - mapping = {d: i for i, d, in enumerate(degrees)} + mapping = {d: i for i, d in enumerate(degrees)} M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping) return _numeric_ac(M, mapping=mapping) @@ -251,7 +251,7 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None): if nodes is None: nodes = G.nodes vals = {G.nodes[n][attribute] for n in nodes} 
- mapping = {d: i for i, d, in enumerate(vals)} + mapping = {d: i for i, d in enumerate(vals)} M = attribute_mixing_matrix(G, attribute, nodes, mapping) return _numeric_ac(M, mapping) diff --git a/networkx/algorithms/isomorphism/ismags.py b/networkx/algorithms/isomorphism/ismags.py index 1fe920adfbb..24819faf95c 100644 --- a/networkx/algorithms/isomorphism/ismags.py +++ b/networkx/algorithms/isomorphism/ismags.py @@ -882,10 +882,7 @@ def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=No # The next node is the one that is unmapped and has fewest # candidates - # Pylint disables because it's a one-shot function. - next_sgn = min( - left_to_map, key=lambda n: min(new_candidates[n], key=len) - ) # pylint: disable=cell-var-from-loop + next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len)) yield from self._map_nodes( next_sgn, new_candidates, @@ -909,10 +906,7 @@ def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None): # "part of" the subgraph in to_be_mapped, and we make it a little # smaller every iteration. 
- # pylint disable because it's guarded against by default value - current_size = len( - next(iter(to_be_mapped), []) - ) # pylint: disable=stop-iteration-return + current_size = len(next(iter(to_be_mapped), [])) found_iso = False if current_size <= len(self.graph): diff --git a/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py b/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py index 1a46c38f501..0e29b1be617 100644 --- a/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py +++ b/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py @@ -57,8 +57,9 @@ def test_single_node(self): dict(zip(G2, it.cycle(labels_many))), "label", ) - l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes( - G2, "label" + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), ) gparams = _GraphParameters( @@ -118,8 +119,9 @@ def test_matching_order(self): dict(zip(G2, it.cycle(labels))), "label", ) - l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes( - G2, "label" + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), ) gparams = _GraphParameters( G1, @@ -155,8 +157,9 @@ def test_matching_order_all_branches(self): G2.nodes[4]["label"] = "red" G2.nodes[5]["label"] = "blue" - l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes( - G2, "label" + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), ) gparams = _GraphParameters( G1, diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index 0d1bb5795f9..2d3f75c04e5 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -647,8 +647,9 @@ def all_pairs_all_shortest_paths(G, weight=None, method="dijkstra"): single_source_all_shortest_paths """ for n in G: - yield n, dict( - single_source_all_shortest_paths(G, n, weight=weight, method=method) + yield ( 
+ n, + dict(single_source_all_shortest_paths(G, n, weight=weight, method=method)), ) diff --git a/networkx/algorithms/tests/test_bridges.py b/networkx/algorithms/tests/test_bridges.py index 9c3ceba607f..b47f5860083 100644 --- a/networkx/algorithms/tests/test_bridges.py +++ b/networkx/algorithms/tests/test_bridges.py @@ -127,7 +127,7 @@ def test_no_weight(self): inf = float("inf") expected = {(3, 4, inf), (4, 3, inf)} assert next(nx.local_bridges(self.BB)) in expected - expected = {(u, v, 3) for u, v, in self.square.edges} + expected = {(u, v, 3) for u, v in self.square.edges} assert set(nx.local_bridges(self.square)) == expected assert list(nx.local_bridges(self.tri)) == [] diff --git a/networkx/utils/tests/test_misc.py b/networkx/utils/tests/test_misc.py index 18d2878f635..38e9a0e5aa3 100644 --- a/networkx/utils/tests/test_misc.py +++ b/networkx/utils/tests/test_misc.py @@ -247,7 +247,8 @@ def test_arbitrary_element(iterable_type, expected): @pytest.mark.parametrize( - "iterator", ((i for i in range(3)), iter([1, 2, 3])) # generator + "iterator", + ((i for i in range(3)), iter([1, 2, 3])), # generator ) def test_arbitrary_element_raises(iterator): """Value error is raised when input is an iterator.""" From 75c0345d45477e3392288127da56365b04bf3512 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Sun, 17 Dec 2023 00:41:04 +0530 Subject: [PATCH 009/129] updated See also sec of argmap class (#7163) * updated See also sec of argmap class * Update decorators.py --- networkx/utils/decorators.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/networkx/utils/decorators.py b/networkx/utils/decorators.py index cc15882da72..dfb39b40f34 100644 --- a/networkx/utils/decorators.py +++ b/networkx/utils/decorators.py @@ -686,10 +686,8 @@ class in broad terms to aid in understanding how to use it. 
not_implemented_for open_file nodes_or_number - random_state py_random_state - networkx.community.quality.require_partition - require_partition + networkx.algorithms.community.quality.require_partition """ From f34dda2c924924edb6a350c62420fb0187ab150f Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sat, 16 Dec 2023 18:17:22 -0800 Subject: [PATCH 010/129] Modify GML test to fix invalid octal character warning. (#7159) * Modify GML test to fix invalid octal character warning. * Switch to unicode char. * Rm extra unicode string indicators. --- networkx/readwrite/tests/test_gml.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/networkx/readwrite/tests/test_gml.py b/networkx/readwrite/tests/test_gml.py index 869b21f6bb2..50eff5c3617 100644 --- a/networkx/readwrite/tests/test_gml.py +++ b/networkx/readwrite/tests/test_gml.py @@ -554,8 +554,8 @@ def assert_parse_error(gml): "directed 1 multigraph 1 ]" ) assert_parse_error( - "graph [edge [ source u'u\4200' target u'u\4200' ] " - + "node [ id u'u\4200' label b ] ]" + "graph [edge [ source '\u4200' target '\u4200' ] " + + "node [ id '\u4200' label b ] ]" ) def assert_generate_error(*args, **kwargs): From 483ea9a4ebc51f813bf9c82dea294a35d7c53c10 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Mon, 18 Dec 2023 02:19:53 +0530 Subject: [PATCH 011/129] DOC : updated examples in mincost.py (#7169) --- networkx/algorithms/flow/mincost.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/networkx/algorithms/flow/mincost.py b/networkx/algorithms/flow/mincost.py index cc8626c7c3c..7a6c3351190 100644 --- a/networkx/algorithms/flow/mincost.py +++ b/networkx/algorithms/flow/mincost.py @@ -182,6 +182,8 @@ def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): >>> G.add_edge("b", "d", weight=1, capacity=9) >>> G.add_edge("c", "d", weight=2, capacity=5) >>> flowDict = nx.min_cost_flow(G) + >>> flowDict + {'a': 
{'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}} """ return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1] @@ -227,6 +229,21 @@ def cost_of_flow(G, flowDict, weight="weight"): cause problems). As a workaround you can use integer numbers by multiplying the relevant edge attributes by a convenient constant factor (eg 100). + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) + >>> flowDict = nx.min_cost_flow(G) + >>> flowDict + {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}} + >>> nx.cost_of_flow(G, flowDict) + 24 """ return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True))) From d68caf64b057f3c5f0adf4f09ce3843e4f2395c4 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sun, 17 Dec 2023 13:20:14 -0800 Subject: [PATCH 012/129] Ensure warnings related to changes in shortest_path returns are visible to users (#7161) * Update shortest_path warning. * Update single_target_shortest_path_length futurewarning. * Update warnings filters in conftest. * Update developer docs. --- doc/developer/deprecations.rst | 8 ++++---- networkx/algorithms/shortest_paths/generic.py | 13 +++++++++++-- networkx/algorithms/shortest_paths/unweighted.py | 10 ++++++++-- networkx/conftest.py | 8 ++++---- 4 files changed, 27 insertions(+), 12 deletions(-) diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst index a2c6700cec0..3b8ce95c573 100644 --- a/doc/developer/deprecations.rst +++ b/doc/developer/deprecations.rst @@ -46,10 +46,6 @@ Version 3.3 * Remove pydot functionality ``drawing/nx_pydot.py``, if pydot is still not being maintained. See #5723 * Remove the ``forest_str`` function from ``readwrite/text.py``. 
Replace existing usages with ``write_network_text``. -* Change ``single_target_shortest_path_length`` in ``algorithms/shortest_path/unweighted.py`` - to return a dict. See #6527 -* Change ``shortest_path`` in ``algorithms/shortest_path/generic.py`` - to return a iterator. See #6527 Version 3.4 ~~~~~~~~~~~ @@ -69,3 +65,7 @@ Version 3.5 * Remove ``all_triplets`` from ``algorithms/triads.py`` * Remove ``random_triad`` from ``algorithms/triad.py``. * Add `not_implemented_for("multigraph”)` decorator to ``k_core``, ``k_shell``, ``k_crust`` and ``k_corona`` functions. +* Change ``single_target_shortest_path_length`` in ``algorithms/shortest_path/unweighted.py`` + to return a dict. See #6527 +* Change ``shortest_path`` in ``algorithms/shortest_path/generic.py`` + to return a iterator. See #6527 diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index 2d3f75c04e5..1ed47aa7144 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -135,8 +135,17 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): method = "unweighted" if weight is None else method if source is None: if target is None: - msg = "shortest_path for all_pairs will return an iterator in v3.3" - warnings.warn(msg, DeprecationWarning) + warnings.warn( + ( + "\n\nshortest_path will return an iterator that yields\n" + "(node, path) pairs instead of a dictionary when source\n" + "and target are unspecified beginning in version 3.5\n\n" + "To keep the current behavior, use:\n\n" + "\tdict(nx.shortest_path(G))" + ), + FutureWarning, + stacklevel=3, + ) # Find paths between all pairs. 
if method == "unweighted": diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py index 78df41bc389..bc329c55263 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -135,8 +135,14 @@ def single_target_shortest_path_length(G, target, cutoff=None): if target not in G: raise nx.NodeNotFound(f"Target {target} is not in G") - msg = "single_target_shortest_path_length will return a dict starting in v3.3" - warnings.warn(msg, DeprecationWarning) + warnings.warn( + ( + "\n\nsingle_target_shortest_path_length will return a dict instead of" + "\nan iterator in version 3.5" + ), + FutureWarning, + stacklevel=3, + ) if cutoff is None: cutoff = float("inf") diff --git a/networkx/conftest.py b/networkx/conftest.py index ef3551f4156..41f4789c330 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -94,13 +94,13 @@ def set_warnings(): ) warnings.filterwarnings( "ignore", - category=DeprecationWarning, - message="single_target_shortest_path_length will", + category=FutureWarning, + message="\n\nsingle_target_shortest_path_length", ) warnings.filterwarnings( "ignore", - category=DeprecationWarning, - message="shortest_path for all_pairs", + category=FutureWarning, + message="\n\nshortest_path", ) warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\nforest_str is deprecated" From 92afda97fcd05abae7c3b6efa3e880943cf3ee39 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Mon, 18 Dec 2023 08:52:09 +0400 Subject: [PATCH 013/129] Sync up behavior of is_{type} for empty graphs (#5849) Improve consistency of property-checking functions (i.e. is_something) for empty graph inputs. Tend towards NetworkXPointlessConcept exceptions for consistency. 
Co-authored-by: Ross Barnowski --- networkx/algorithms/chordal.py | 2 ++ .../components/tests/test_strongly_connected.py | 3 --- .../components/tests/test_weakly_connected.py | 6 ++++++ networkx/algorithms/dag.py | 3 ++- networkx/algorithms/distance_regular.py | 2 ++ networkx/algorithms/regular.py | 2 ++ networkx/algorithms/tests/test_dag.py | 8 +++++++- networkx/algorithms/tests/test_distance_regular.py | 9 +++++++++ networkx/algorithms/tests/test_euler.py | 7 +++++++ networkx/algorithms/tests/test_regular.py | 6 ++++++ networkx/algorithms/tree/tests/test_recognition.py | 12 ++++++++++++ 11 files changed, 55 insertions(+), 5 deletions(-) diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py index 2aa56798317..0793761027a 100644 --- a/networkx/algorithms/chordal.py +++ b/networkx/algorithms/chordal.py @@ -339,6 +339,8 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): It ignores any self loops. """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") unnumbered = set(G) if s is None: s = arbitrary_element(G) diff --git a/networkx/algorithms/components/tests/test_strongly_connected.py b/networkx/algorithms/components/tests/test_strongly_connected.py index f1c773026a6..6d4ea855229 100644 --- a/networkx/algorithms/components/tests/test_strongly_connected.py +++ b/networkx/algorithms/components/tests/test_strongly_connected.py @@ -186,9 +186,6 @@ def test_connected_raise(self): with pytest.deprecated_call(): next(nx.strongly_connected_components_recursive(G)) pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G) - pytest.raises( - nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() - ) pytest.raises(NetworkXNotImplemented, nx.condensation, G) strong_cc_methods = ( diff --git a/networkx/algorithms/components/tests/test_weakly_connected.py b/networkx/algorithms/components/tests/test_weakly_connected.py index e313263668c..f014478930f 100644 --- 
a/networkx/algorithms/components/tests/test_weakly_connected.py +++ b/networkx/algorithms/components/tests/test_weakly_connected.py @@ -88,3 +88,9 @@ def test_connected_mutability(self): assert len(seen & component) == 0 seen.update(component) component.clear() + + +def test_is_weakly_connected_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"): + nx.is_weakly_connected(G) diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index 58138c340c4..eb3a064e4b2 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -641,7 +641,8 @@ def is_aperiodic(G): """ if not G.is_directed(): raise nx.NetworkXError("is_aperiodic not defined for undirected graphs") - + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") s = arbitrary_element(G) levels = {s: 0} this_level = [s] diff --git a/networkx/algorithms/distance_regular.py b/networkx/algorithms/distance_regular.py index 18c19ee00e0..da3754330d0 100644 --- a/networkx/algorithms/distance_regular.py +++ b/networkx/algorithms/distance_regular.py @@ -147,6 +147,8 @@ def intersection_array(G): global_parameters """ # test for regular graph (all degrees must be equal) + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") degree = iter(G.degree()) (_, k) = next(degree) for _, knext in degree: diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py index d3332342794..52b5ed6ee81 100644 --- a/networkx/algorithms/regular.py +++ b/networkx/algorithms/regular.py @@ -29,6 +29,8 @@ def is_regular(G): True """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") n1 = nx.utils.arbitrary_element(G) if not G.is_directed(): d1 = G.degree(n1) diff --git a/networkx/algorithms/tests/test_dag.py b/networkx/algorithms/tests/test_dag.py index 540c0c55e4d..d26c9fd3b4d 100644 --- a/networkx/algorithms/tests/test_dag.py +++ 
b/networkx/algorithms/tests/test_dag.py @@ -618,11 +618,17 @@ def test_is_aperiodic_selfloop(): assert nx.is_aperiodic(G) -def test_is_aperiodic_raise(): +def test_is_aperiodic_undirected_raises(): G = nx.Graph() pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) +def test_is_aperiodic_empty_graph(): + G = nx.empty_graph(create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes."): + nx.is_aperiodic(G) + + def test_is_aperiodic_bipartite(): # Bipartite graph G = nx.DiGraph(nx.davis_southern_women_graph()) diff --git a/networkx/algorithms/tests/test_distance_regular.py b/networkx/algorithms/tests/test_distance_regular.py index d336b188214..6cbeb2a6d73 100644 --- a/networkx/algorithms/tests/test_distance_regular.py +++ b/networkx/algorithms/tests/test_distance_regular.py @@ -1,3 +1,5 @@ +import pytest + import networkx as nx from networkx import is_strongly_regular @@ -41,6 +43,13 @@ def test_intersection_array(self): assert c == [1, 2, 5] +@pytest.mark.parametrize("f", (nx.is_distance_regular, nx.is_strongly_regular)) +def test_empty_graph_raises(f): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"): + f(G) + + class TestStronglyRegular: """Unit tests for the :func:`~networkx.is_strongly_regular` function. 
diff --git a/networkx/algorithms/tests/test_euler.py b/networkx/algorithms/tests/test_euler.py index 08eaf7fccc6..b5871f09b5a 100644 --- a/networkx/algorithms/tests/test_euler.py +++ b/networkx/algorithms/tests/test_euler.py @@ -5,6 +5,13 @@ import networkx as nx +@pytest.mark.parametrize("f", (nx.is_eulerian, nx.is_semieulerian)) +def test_empty_graph_raises(f): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"): + f(G) + + class TestIsEulerian: def test_is_eulerian(self): assert nx.is_eulerian(nx.complete_graph(5)) diff --git a/networkx/algorithms/tests/test_regular.py b/networkx/algorithms/tests/test_regular.py index 0c8e4e46582..a8b4c3a30de 100644 --- a/networkx/algorithms/tests/test_regular.py +++ b/networkx/algorithms/tests/test_regular.py @@ -68,6 +68,12 @@ def test_is_regular4(self): assert reg.is_regular(g) +def test_is_regular_empty_graph_raises(): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"): + nx.is_regular(G) + + class TestIsKRegular: def test_is_k_regular1(self): g = gen.cycle_graph(4) diff --git a/networkx/algorithms/tree/tests/test_recognition.py b/networkx/algorithms/tree/tests/test_recognition.py index a9c6c5aade9..105f5a89e9b 100644 --- a/networkx/algorithms/tree/tests/test_recognition.py +++ b/networkx/algorithms/tree/tests/test_recognition.py @@ -119,6 +119,12 @@ def test_emptybranch(): assert not nx.is_arborescence(G) +def test_is_branching_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."): + nx.is_branching(G) + + def test_path(): G = nx.DiGraph() nx.add_path(G, range(5)) @@ -160,3 +166,9 @@ def test_notarborescence2(): G.add_edge(6, 4) assert not nx.is_branching(G) assert not nx.is_arborescence(G) + + +def test_is_arborescense_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."): + nx.is_arborescence(G) From 
59e6b7fb041d0f3b4b17b8474fa7d1580c4f0d99 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Mon, 18 Dec 2023 19:43:27 +0530 Subject: [PATCH 014/129] Added `NodeNotFound` exceptions to `_apply_prediction` and `simrank`, and ignored isolated nodes in `panther_similarity` (#7110) * ignoring isolated nodes in panther_similarity * added NodeNotFound in simrank_similarity * added NodeNotFound in _apply_prediction * bug fix * added Raises sec to all funcs and updated error msg * added Raises sec to simrank_similarity * added tests to test_similarity.py and updated simrank_similarity docs * updated link_prediction.py * updated and added tests in test_link_prediction.py * bug : updated NodeNotFound tests of simrank_similarity * bug : updated test_simrank_target_not_found and style fixes * added NetworkXUnfeasible to panther_similarity * bug fix * added NodeNotFound for panther_similarity * style fix * list() -> set() --- networkx/algorithms/link_prediction.py | 91 ++++++++++- networkx/algorithms/similarity.py | 49 +++++- .../algorithms/tests/test_link_prediction.py | 153 +++++++++--------- networkx/algorithms/tests/test_similarity.py | 25 +++ 4 files changed, 233 insertions(+), 85 deletions(-) diff --git a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py index 7335a77f5e0..21e89fbf160 100644 --- a/networkx/algorithms/link_prediction.py +++ b/networkx/algorithms/link_prediction.py @@ -37,6 +37,12 @@ def _apply_prediction(G, func, ebunch=None): """ if ebunch is None: ebunch = nx.non_edges(G) + else: + for u, v in ebunch: + if u not in G: + raise nx.NodeNotFound(f"Node {u} not in G.") + if v not in G: + raise nx.NodeNotFound(f"Node {v} not in G.") return ((u, v, func(u, v)) for u, v in ebunch) @@ -72,6 +78,14 @@ def resource_allocation_index(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their resource allocation index. 
+ Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -127,6 +141,14 @@ def jaccard_coefficient(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their Jaccard coefficient. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -186,6 +208,14 @@ def adamic_adar_index(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their Adamic-Adar index. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -264,6 +294,17 @@ def common_neighbor_centrality(G, ebunch=None, alpha=0.8): pair of nodes and p is their Common Neighbor and Centrality based Parameterized Algorithm(CCPA) score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If self loops exsists in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. 
+ Examples -------- >>> G = nx.complete_graph(5) @@ -286,7 +327,7 @@ def common_neighbor_centrality(G, ebunch=None, alpha=0.8): def predict(u, v): if u == v: - raise nx.NetworkXAlgorithmError("Self links are not supported") + raise nx.NetworkXAlgorithmError("Self loops are not supported") return sum(1 for _ in nx.common_neighbors(G, u, v)) @@ -296,7 +337,7 @@ def predict(u, v): def predict(u, v): if u == v: - raise nx.NetworkXAlgorithmError("Self links are not supported") + raise nx.NetworkXAlgorithmError("Self loops are not supported") path_len = spl[u].get(v, inf) return alpha * sum(1 for _ in nx.common_neighbors(G, u, v)) + ( @@ -338,6 +379,14 @@ def preferential_attachment(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their preferential attachment score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -402,6 +451,17 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.path_graph(3) @@ -477,6 +537,17 @@ def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). 
+ + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.Graph() @@ -553,6 +624,18 @@ def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their WIC measure. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + - If `delta` is less than or equal to zero. + - If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.Graph() @@ -601,4 +684,6 @@ def _community(G, u, community): try: return node_u[community] except KeyError as err: - raise nx.NetworkXAlgorithmError("No community information") from err + raise nx.NetworkXAlgorithmError( + f"No community information available for Node {u}" + ) from err diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 92b0cf9a3e1..0ad6de6e6fe 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -1275,6 +1275,14 @@ def simrank(G, u, v): If neither ``source`` nor ``target`` is ``None``, this returns the similarity value for the given pair of nodes. + Raises + ------- + ExceededMaxIterations + If the algorithm does not converge within ``max_iterations``. + + NodeNotFound + If either ``source`` or ``target`` is not in `G`. 
+ Examples -------- >>> G = nx.cycle_graph(2) @@ -1311,8 +1319,21 @@ def simrank(G, u, v): import numpy as np nodelist = list(G) - s_indx = None if source is None else nodelist.index(source) - t_indx = None if target is None else nodelist.index(target) + if source is not None: + if source not in nodelist: + raise nx.NodeNotFound(f"Source node {source} not in G") + else: + s_indx = nodelist.index(source) + else: + s_indx = None + + if target is not None: + if target not in nodelist: + raise nx.NodeNotFound(f"Target node {target} not in G") + else: + t_indx = nodelist.index(target) + else: + t_indx = None x = _simrank_similarity_numpy( G, s_indx, t_indx, importance_factor, max_iterations, tolerance @@ -1535,6 +1556,18 @@ def panther_similarity( the self-similarity (i.e., ``v``) will not be included in the returned dictionary. + Raises + ------- + NetworkXUnfeasible + If `source` is an isolated node. + + NodeNotFound + If `source` is not in `G`. + + Notes + ------- + The isolated nodes in `G` are ignored. + Examples -------- >>> G = nx.star_graph(10) @@ -1550,6 +1583,18 @@ def panther_similarity( """ import numpy as np + if source not in G: + raise nx.NodeNotFound(f"Source node {source} not in G") + + isolates = set(nx.isolates(G)) + + if source in isolates: + raise nx.NetworkXUnfeasible( + f"Panther similarity is not defined for the isolated source node {source}." 
+ ) + + G = G.subgraph([node for node in G.nodes if node not in isolates]).copy() + num_nodes = G.number_of_nodes() if num_nodes < k: warnings.warn( diff --git a/networkx/algorithms/tests/test_link_prediction.py b/networkx/algorithms/tests/test_link_prediction.py index 7fc04d20672..b4643b3420d 100644 --- a/networkx/algorithms/tests/test_link_prediction.py +++ b/networkx/algorithms/tests/test_link_prediction.py @@ -34,23 +34,17 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(1, 2)], [(1, 2, 0.25)]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -80,23 +74,17 @@ def test_P4(self): G = nx.path_graph(4) self.test(G, [(0, 2)], [(0, 2, 0.5)]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 
2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_edges_from([(0, 1), (2, 3)]) @@ -131,23 +119,17 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -190,6 +172,11 @@ def test_notimplemented(self, graph_type): nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -223,23 +210,17 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(0, 2)], [(0, 2, 4)]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, 
nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_zero_degrees(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -282,17 +263,21 @@ def test_S4(self): G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 2)]) - def test_notimplemented(self): - G = nx.DiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiDiGraph([(0, 1), (1, 2)]) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -387,17 +372,21 @@ def test_S4(self): G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 0.25)]) - def test_notimplemented(self): - G = nx.DiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert 
pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiDiGraph([(0, 1), (1, 2)]) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -495,17 +484,21 @@ def test_S4(self): G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)]) - def test_notimplemented(self): - G = nx.DiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiDiGraph([(0, 1), (1, 2)]) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py index 
b7fcf90fa43..420c709c173 100644 --- a/networkx/algorithms/tests/test_similarity.py +++ b/networkx/algorithms/tests/test_similarity.py @@ -699,6 +699,16 @@ def test_simrank_max_iterations(self, alg): G = nx.cycle_graph(5) pytest.raises(nx.ExceededMaxIterations, alg, G, max_iterations=10) + def test_simrank_source_not_found(self): + G = nx.cycle_graph(5) + with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"): + nx.simrank_similarity(G, source=10) + + def test_simrank_target_not_found(self): + G = nx.cycle_graph(5) + with pytest.raises(nx.NodeNotFound, match="Target node 10 not in G"): + nx.simrank_similarity(G, target=10) + def test_simrank_between_versions(self): G = nx.cycle_graph(5) # _python tolerance 1e-4 @@ -819,6 +829,21 @@ def test_panther_similarity_weighted(self): sim = nx.panther_similarity(G, "v1", path_length=2, weight="w") assert sim == expected + def test_panther_similarity_source_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)]) + with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"): + nx.panther_similarity(G, source=10) + + def test_panther_similarity_isolated(self): + G = nx.Graph() + G.add_nodes_from(range(5)) + with pytest.raises( + nx.NetworkXUnfeasible, + match="Panther similarity is not defined for the isolated source node 1.", + ): + nx.panther_similarity(G, source=1) + def test_generate_random_paths_unweighted(self): np.random.seed(42) From 267e33e99be42ed717a69817ddfbbd5c20238a2e Mon Sep 17 00:00:00 2001 From: Ayooluwa <43212808+Ay-slim@users.noreply.github.com> Date: Mon, 18 Dec 2023 20:00:09 +0000 Subject: [PATCH 015/129] Document the walk_type argument default in directed_laplacian and similar functions. 
(#7171) * Fixes networkx/networkx#7147 * Word documentation change properly based on PR review Co-authored-by: Ross Barnowski * Update walk_type documentation in laplacian_centrality function * Update walk_type documentation in _transition_matrix and directed_combinatorial_laplacian matrix functions --------- Co-authored-by: Ross Barnowski --- networkx/algorithms/centrality/laplacian.py | 7 +++++-- networkx/linalg/laplacianmatrix.py | 21 +++++++++++++++------ 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/networkx/algorithms/centrality/laplacian.py b/networkx/algorithms/centrality/laplacian.py index e0a9a6d5172..d8a58e6b322 100644 --- a/networkx/algorithms/centrality/laplacian.py +++ b/networkx/algorithms/centrality/laplacian.py @@ -50,8 +50,11 @@ def laplacian_centrality( walk_type : string or None, optional (default=None) Optional parameter `walk_type` used when calling :func:`directed_laplacian_matrix `. - If None, the transition matrix is selected depending on the properties - of the graph. Otherwise can be `random`, `lazy`, or `pagerank`. + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. alpha : real (default = 0.95) Optional parameter `alpha` used when calling diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index 13763828131..17bfd6d362b 100644 --- a/networkx/linalg/laplacianmatrix.py +++ b/networkx/linalg/laplacianmatrix.py @@ -219,8 +219,11 @@ def directed_laplacian_matrix( If None, then each edge has weight 1. walk_type : string or None, optional (default=None) - If None, `P` is selected depending on the properties of the - graph. 
Otherwise is one of 'random', 'lazy', or 'pagerank' + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. alpha : real (1 - alpha) is the teleportation probability used with pagerank @@ -307,8 +310,11 @@ def directed_combinatorial_laplacian_matrix( If None, then each edge has weight 1. walk_type : string or None, optional (default=None) - If None, `P` is selected depending on the properties of the - graph. Otherwise is one of 'random', 'lazy', or 'pagerank' + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. alpha : real (1 - alpha) is the teleportation probability used with pagerank @@ -372,8 +378,11 @@ def _transition_matrix(G, nodelist=None, weight="weight", walk_type=None, alpha= If None, then each edge has weight 1. walk_type : string or None, optional (default=None) - If None, `P` is selected depending on the properties of the - graph. Otherwise is one of 'random', 'lazy', or 'pagerank' + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. 
alpha : real (1 - alpha) is the teleportation probability used with pagerank From d042db56312193f3f397877cffba00eb951dcd43 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Thu, 21 Dec 2023 01:16:40 +0400 Subject: [PATCH 016/129] DOC: Add plots to classic graph generators docs (#7114) * DOC: Add plots to classic graph generators docs * Update conf.py * make pre-commit happy --- doc/conf.py | 4 +- networkx/generators/classic.py | 76 +++++++++++++++++++++++++++++++++- 2 files changed, 78 insertions(+), 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 1d8ea398768..f54a0228992 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -248,9 +248,11 @@ plot_pre_code = """ import networkx as nx +import numpy as np +np.random.seed(42) """ -plot_formats = [("png", 100), "pdf"] +plot_formats = [("png", 100)] def setup(app): diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index 09d5d9938ed..4bb660b5a8a 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -75,6 +75,10 @@ def full_rary_tree(r, n, create_using=None): (if a leaf at the bottom level is missing, then so are all of the leaves to its right." [1]_ + .. plot:: + + >>> nx.draw(nx.full_rary_tree(2, 10)) + Parameters ---------- r : int @@ -103,6 +107,10 @@ def full_rary_tree(r, n, create_using=None): def balanced_tree(r, h, create_using=None): """Returns the perfectly balanced `r`-ary tree of height `h`. + .. plot:: + + >>> nx.draw(nx.balanced_tree(2, 3)) + Parameters ---------- r : int @@ -149,6 +157,10 @@ def balanced_tree(r, h, create_using=None): def barbell_graph(m1, m2, create_using=None): """Returns the Barbell Graph: two complete graphs connected by a path. + .. plot:: + + >>> nx.draw(nx.barbell_graph(4, 2)) + Parameters ---------- m1 : int @@ -222,6 +234,10 @@ def binomial_tree(n, create_using=None): is defined recursively by linking two binomial trees of order k-1: the root of one is the leftmost child of the root of the other. + .. 
plot:: + + >>> nx.draw(nx.binomial_tree(3)) + Parameters ---------- n : int @@ -305,6 +321,10 @@ def circular_ladder_graph(n, create_using=None): Node labels are the integers 0 to n-1 + .. plot:: + + >>> nx.draw(nx.circular_ladder_graph(5)) + """ G = ladder_graph(n, create_using) G.add_edge(0, n - 1) @@ -320,6 +340,10 @@ def circulant_graph(n, offsets, create_using=None): such that node $i$ is connected to nodes $(i + x) \mod n$ and $(i - x) \mod n$ for all $x$ in $x_1, ..., x_m$. Thus $Ci_n(1)$ is a cycle graph. + .. plot:: + + >>> nx.draw(nx.circulant_graph(10, [1])) + Parameters ---------- n : integer @@ -392,6 +416,10 @@ def cycle_graph(n, create_using=None): $C_n$ is a path with its two end-nodes connected. + .. plot:: + + >>> nx.draw(nx.cycle_graph(5)) + Parameters ---------- n : int or iterable container of nodes @@ -422,6 +450,10 @@ def dorogovtsev_goltsev_mendes_graph(n, create_using=None): - Total number of nodes = ``3 * (3**n + 1) / 2`` - Total number of edges = ``3 ** (n + 1)`` + .. plot:: + + >>> nx.draw(nx.dorogovtsev_goltsev_mendes_graph(3)) + Parameters ---------- n : integer @@ -476,6 +508,10 @@ def dorogovtsev_goltsev_mendes_graph(n, create_using=None): def empty_graph(n=0, create_using=None, default=Graph): """Returns the empty graph with n nodes and zero edges. + .. plot:: + + >>> nx.draw(nx.empty_graph(5)) + Parameters ---------- n : int or iterable container of nodes (default = 0) @@ -575,6 +611,10 @@ def ladder_graph(n, create_using=None): Node labels are the integers 0 to 2*n - 1. + .. plot:: + + >>> nx.draw(nx.ladder_graph(5)) + """ G = empty_graph(2 * n, create_using) if G.is_directed(): @@ -592,6 +632,10 @@ def lollipop_graph(m, n, create_using=None): This is the Barbell Graph without the right barbell. + .. 
plot:: + + >>> nx.draw(nx.lollipop_graph(3, 4)) + Parameters ---------- m, n : int or iterable container of nodes @@ -664,6 +708,10 @@ def null_graph(create_using=None): def path_graph(n, create_using=None): """Returns the Path graph `P_n` of linearly connected nodes. + .. plot:: + + >>> nx.draw(nx.path_graph(5)) + Parameters ---------- n : int or iterable @@ -688,6 +736,10 @@ def star_graph(n, create_using=None): The star graph consists of one center node connected to n outer nodes. + .. plot:: + + >>> nx.draw(nx.star_graph(6)) + Parameters ---------- n : int or iterable @@ -724,6 +776,10 @@ def tadpole_graph(m, n, create_using=None): This graph on m+n nodes connects a cycle of size `m` to a path of length `n`. It looks like a tadpole. It is also called a kite graph or a dragon graph. + .. plot:: + + >>> nx.draw(nx.tadpole_graph(3, 5)) + Parameters ---------- m, n : int or iterable container of nodes @@ -776,7 +832,13 @@ def tadpole_graph(m, n, create_using=None): @nx._dispatch(graphs=None) def trivial_graph(create_using=None): - """Return the Trivial graph with one node (with label 0) and no edges.""" + """Return the Trivial graph with one node (with label 0) and no edges. + + .. plot:: + + >>> nx.draw(nx.trivial_graph(), with_labels=True) + + """ G = empty_graph(1, create_using) return G @@ -793,6 +855,10 @@ def turan_graph(n, r): $r-(n \mod r)$ partitions of size $n/r$, rounded down, and $n \mod r$ partitions of size $n/r+1$, rounded down. + .. plot:: + + >>> nx.draw(nx.turan_graph(6, 2)) + Parameters ---------- n : int @@ -822,6 +888,10 @@ def wheel_graph(n, create_using=None): The wheel graph consists of a hub node connected to a cycle of (n-1) nodes. + .. plot:: + + >>> nx.draw(nx.wheel_graph(5)) + Parameters ---------- n : int or iterable @@ -851,6 +921,10 @@ def wheel_graph(n, create_using=None): def complete_multipartite_graph(*subset_sizes): """Returns the complete multipartite graph with the specified subset sizes. + .. 
plot:: + + >>> nx.draw(nx.complete_multipartite_graph(1, 2, 3)) + Parameters ---------- subset_sizes : tuple of integers or tuple of node iterables From b7d0b0cc8f7395dc9668d25379f3615752112c20 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Thu, 21 Dec 2023 02:01:10 -0800 Subject: [PATCH 017/129] Fix not_implemented_for decorator for is_regular and related functions (#7182) * Add tests for NotImplementedError for is_regular. * Fix NetworkXNotImplemented for is_regular and related fns. * Handle similar instances in maxcut. * Handle similar case for asyn_fluid. * Handle similar case for tree_isomorphism. --- networkx/algorithms/approximation/maxcut.py | 6 ++++-- .../algorithms/approximation/tests/test_maxcut.py | 12 ++++++++++++ networkx/algorithms/community/asyn_fluid.py | 3 ++- .../algorithms/community/tests/test_asyn_fluid.py | 7 +++++++ networkx/algorithms/distance_regular.py | 6 ++++-- .../isomorphism/tests/test_tree_isomorphism.py | 10 ++++++++++ networkx/algorithms/isomorphism/tree_isomorphism.py | 3 ++- networkx/algorithms/tests/test_distance_regular.py | 10 ++++++++++ 8 files changed, 51 insertions(+), 6 deletions(-) diff --git a/networkx/algorithms/approximation/maxcut.py b/networkx/algorithms/approximation/maxcut.py index ec62b346bb4..31ccdc9fde3 100644 --- a/networkx/algorithms/approximation/maxcut.py +++ b/networkx/algorithms/approximation/maxcut.py @@ -4,7 +4,8 @@ __all__ = ["randomized_partitioning", "one_exchange"] -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @py_random_state(1) @nx._dispatch(edge_attrs="weight") def randomized_partitioning(G, seed=None, p=0.5, weight=None): @@ -49,7 +50,8 @@ def _swap_node_partition(cut, node): return cut - {node} if node in cut else cut.union({node}) -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @py_random_state(2) @nx._dispatch(edge_attrs="weight") def one_exchange(G, 
initial_cut=None, seed=None, weight=None): diff --git a/networkx/algorithms/approximation/tests/test_maxcut.py b/networkx/algorithms/approximation/tests/test_maxcut.py index 39291fbf14d..ef0424401e4 100644 --- a/networkx/algorithms/approximation/tests/test_maxcut.py +++ b/networkx/algorithms/approximation/tests/test_maxcut.py @@ -1,9 +1,21 @@ import random +import pytest + import networkx as nx from networkx.algorithms.approximation import maxcut +@pytest.mark.parametrize( + "f", (nx.approximation.randomized_partitioning, nx.approximation.one_exchange) +) +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(f, graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + f(G) + + def _is_valid_cut(G, set1, set2): union = set1.union(set2) assert union == set(G.nodes) diff --git a/networkx/algorithms/community/asyn_fluid.py b/networkx/algorithms/community/asyn_fluid.py index 4c2223e6331..490bb046e7a 100644 --- a/networkx/algorithms/community/asyn_fluid.py +++ b/networkx/algorithms/community/asyn_fluid.py @@ -10,7 +10,8 @@ __all__ = ["asyn_fluidc"] -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @py_random_state(3) @nx._dispatch def asyn_fluidc(G, k, max_iter=100, seed=None): diff --git a/networkx/algorithms/community/tests/test_asyn_fluid.py b/networkx/algorithms/community/tests/test_asyn_fluid.py index cd108dda369..6c023be773d 100644 --- a/networkx/algorithms/community/tests/test_asyn_fluid.py +++ b/networkx/algorithms/community/tests/test_asyn_fluid.py @@ -5,6 +5,13 @@ from networkx.algorithms.community import asyn_fluidc +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.community.asyn_fluidc(G, 
1) + + def test_exceptions(): test = Graph() test.add_node("a") diff --git a/networkx/algorithms/distance_regular.py b/networkx/algorithms/distance_regular.py index da3754330d0..936fd5a2216 100644 --- a/networkx/algorithms/distance_regular.py +++ b/networkx/algorithms/distance_regular.py @@ -109,7 +109,8 @@ def global_parameters(b, c): return ((y, b[0] - x - y, x) for x, y in zip(b + [0], [0] + c)) -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @nx._dispatch def intersection_array(G): """Returns the intersection array of a distance-regular graph. @@ -181,7 +182,8 @@ def intersection_array(G): # TODO There is a definition for directed strongly regular graphs. -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @nx._dispatch def is_strongly_regular(G): """Returns True if and only if the given graph is strongly diff --git a/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py b/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py index 95e5fec8728..fa1ab9bbaef 100644 --- a/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py +++ b/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py @@ -1,6 +1,8 @@ import random import time +import pytest + import networkx as nx from networkx.algorithms.isomorphism.tree_isomorphism import ( rooted_tree_isomorphism, @@ -9,6 +11,14 @@ from networkx.classes.function import is_directed +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_tree_isomorphism_raises_on_directed_and_multigraphs(graph_constructor): + t1 = graph_constructor([(0, 1)]) + t2 = graph_constructor([(1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.isomorphism.tree_isomorphism(t1, t2) + + # have this work for graph # given two trees (either the directed or undirected) # transform t2 according to the isomorphism diff --git 
a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py index 6e935063336..c0ef821c1e3 100644 --- a/networkx/algorithms/isomorphism/tree_isomorphism.py +++ b/networkx/algorithms/isomorphism/tree_isomorphism.py @@ -209,7 +209,8 @@ def rooted_tree_isomorphism(t1, root1, t2, root2): return isomorphism -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @nx._dispatch(graphs={"t1": 0, "t2": 1}) def tree_isomorphism(t1, t2): """ diff --git a/networkx/algorithms/tests/test_distance_regular.py b/networkx/algorithms/tests/test_distance_regular.py index 6cbeb2a6d73..545fb6dee6a 100644 --- a/networkx/algorithms/tests/test_distance_regular.py +++ b/networkx/algorithms/tests/test_distance_regular.py @@ -4,6 +4,16 @@ from networkx import is_strongly_regular +@pytest.mark.parametrize( + "f", (nx.is_distance_regular, nx.intersection_array, nx.is_strongly_regular) +) +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(f, graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + f(G) + + class TestDistanceRegular: def test_is_distance_regular(self): assert nx.is_distance_regular(nx.icosahedral_graph()) From a97c162a9a4a055aeba705b958082fee2c4d8bca Mon Sep 17 00:00:00 2001 From: Dan Schult Date: Fri, 22 Dec 2023 03:41:15 -0500 Subject: [PATCH 018/129] New PR for Fixes minimal d-separator function failing to handle cases where no d-separators exist (#7019) * add check that minimal d-separating set is valid sepset * make api change note * add edge case where proposed sepset is not empty but does not separate * fix typo in test * improve efficiency of d_sep check * finish fixing failing unit tests * remove xml * use existing ancestors function * check non disjoint node sets raise error * enforce disjoint node sets for d-separation * add test for 
nondisjoint sets raising error * fix test errors. allow `i` and `r` to be nodes instead of sets. * add keyword-only indicators * rename u -> x and v -> y; fix docs quotations * rename i -> included and r -> restricted * docstrings * allow `included`, `restricted` to be nodes * fix raise error for nondisjoint sets * added nondisjoint test for `minimal_d_separated` * added test of `included` and `restricted` * add test of `included` and `restricted`; found bug * rebase on main * update func sigs according to dschult's suggestion * updated docs with new func sig; ref new algorithm * update `find_min...` to new alg; outline 2 helper funcs * Designate 3.2 release * implemented reachable; tests pass * typo * updated `find_minimal_d_separator` docs * finish implement new alg for `is/find_min_d_sep..` * reworking and adding to docs for d-separation * remove release note * cleanup extra files * updated input checking and doc_strings * deprecate d_separated and minimal_d_separator * change stacklevel of deprecations --------- Co-authored-by: Jaron Lee Co-authored-by: Alex Markham Co-authored-by: Alex Markham --- doc/developer/deprecations.rst | 2 + doc/reference/algorithms/d_separation.rst | 4 +- networkx/algorithms/d_separation.py | 797 ++++++++++++------ .../algorithms/tests/test_d_separation.py | 194 ++++- networkx/conftest.py | 6 + 5 files changed, 698 insertions(+), 305 deletions(-) diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst index 3b8ce95c573..8dfbccdc7ca 100644 --- a/doc/developer/deprecations.rst +++ b/doc/developer/deprecations.rst @@ -64,6 +64,8 @@ Version 3.5 ~~~~~~~~~~~ * Remove ``all_triplets`` from ``algorithms/triads.py`` * Remove ``random_triad`` from ``algorithms/triad.py``. +* Remove ``d_separated`` from ``algorithms/d_separation.py``. +* Remove ``minimal_d_separator`` from ``algorithms/d_separation.py``. * Add `not_implemented_for("multigraph”)` decorator to ``k_core``, ``k_shell``, ``k_crust`` and ``k_corona`` functions. 
* Change ``single_target_shortest_path_length`` in ``algorithms/shortest_path/unweighted.py`` to return a dict. See #6527 diff --git a/doc/reference/algorithms/d_separation.rst b/doc/reference/algorithms/d_separation.rst index 053471b575b..09d34965e6a 100644 --- a/doc/reference/algorithms/d_separation.rst +++ b/doc/reference/algorithms/d_separation.rst @@ -6,6 +6,6 @@ D-Separation .. autosummary:: :toctree: generated/ - d_separated + is_d_separator is_minimal_d_separator - minimal_d_separator + find_minimal_d_separator diff --git a/networkx/algorithms/d_separation.py b/networkx/algorithms/d_separation.py index 4322b095822..dc1e105ee3a 100644 --- a/networkx/algorithms/d_separation.py +++ b/networkx/algorithms/d_separation.py @@ -11,88 +11,183 @@ algorithm presented in [2]_. Refer to [3]_, [4]_ for a couple of alternative algorithms. -Here, we provide a brief overview of d-separation and related concepts that -are relevant for understanding it: - -Blocking paths --------------- +The functional interface in NetworkX consists of three functions: -Before we overview, we introduce the following terminology to describe paths: +- `find_minimal_d_separator` returns a minimal d-separator set ``z``. + That is, removing any node or nodes from it makes it no longer a d-separator. +- `is_d_separator` checks if a given set is a d-separator. +- `is_minimal_d_separator` checks if a given set is a minimal d-separator. -- "open" path: A path between two nodes that can be traversed -- "blocked" path: A path between two nodes that cannot be traversed +D-separators +------------ -A **collider** is a triplet of nodes along a path that is like the following: -``... u -> c <- v ...``), where 'c' is a common successor of ``u`` and ``v``. A path -through a collider is considered "blocked". When -a node that is a collider, or a descendant of a collider is included in -the d-separating set, then the path through that collider node is "open". 
If the -path through the collider node is open, then we will call this node an open collider. +Here, we provide a brief overview of d-separation and related concepts that +are relevant for understanding it: -The d-separation set blocks the paths between ``u`` and ``v``. If you include colliders, -or their descendant nodes in the d-separation set, then those colliders will open up, -enabling a path to be traversed if it is not blocked some other way. +The ideas of d-separation and d-connection relate to paths being open or blocked. + +- A "path" is a sequence of nodes connected in order by edges. Unlike for most + graph theory analysis, the direction of the edges is ignored. Thus the path + can be thought of as a traditional path on the undirected version of the graph. +- A "candidate d-separator" ``z`` is a set of nodes being considered as + possibly blocking all paths between two prescribed sets ``x`` and ``y`` of nodes. + We refer to each node in the candidate d-separator as "known". +- A "collider" node on a path is a node that is a successor of its two neighbor + nodes on the path. That is, ``c`` is a collider if the edge directions + along the path look like ``... u -> c <- v ...``. +- If a collider node or any of its descendants are "known", the collider + is called an "open collider". Otherwise it is a "blocking collider". +- Any path can be "blocked" in two ways. If the path contains a "known" node + that is not a collider, the path is blocked. Also, if the path contains a + collider that is not a "known" node, the path is blocked. +- A path is "open" if it is not blocked. That is, it is open if every node is + either an open collider or not a "known". Said another way, every + "known" in the path is a collider and every collider is open (has a + "known" as an inclusive descendant). The concept of "open path" is meant to + demonstrate a probabilistic conditional dependence between two nodes given + prescribed knowledge ("known" nodes). 
+- Two sets ``x`` and ``y`` of nodes are "d-separated" by a set of nodes ``z`` + if all paths between nodes in ``x`` and nodes in ``y`` are blocked. That is, + if there are no open paths from any node in ``x`` to any node in ``y``. + Such a set ``z`` is a "d-separator" of ``x`` and ``y``. +- A "minimal d-separator" is a d-separator ``z`` for which no node or subset + of nodes can be removed with it still being a d-separator. + +The d-separator blocks some paths between ``x`` and ``y`` but opens others. +Nodes in the d-separator block paths if the nodes are not colliders. +But if a collider or its descendant nodes are in the d-separation set, the +colliders are open, allowing a path through that collider. Illustration of D-separation with examples ------------------------------------------ -For a pair of two nodes, ``u`` and ``v``, all paths are considered open if -there is a path between ``u`` and ``v`` that is not blocked. That means, there is an open -path between ``u`` and ``v`` that does not encounter a collider, or a variable in the -d-separating set. +A pair of two nodes, ``u`` and ``v``, are d-connected if there is a path +from ``u`` to ``v`` that is not blocked. That means, there is an open +path from ``u`` to ``v``. For example, if the d-separating set is the empty set, then the following paths are -unblocked between ``u`` and ``v``: +open between ``u`` and ``v``: -- u <- z -> v -- u -> w -> ... -> z -> v +- u <- n -> v +- u -> w -> ... -> n -> v -If for example, 'z' is in the d-separating set, then 'z' blocks those paths -between ``u`` and ``v``. +If on the other hand, ``n`` is in the d-separating set, then ``n`` blocks +those paths between ``u`` and ``v``. -Colliders block a path by default if they and their descendants are not included -in the d-separating set. An example of a path that is blocked when the d-separating -set is empty is: +Colliders block a path if they and their descendants are not included +in the d-separating set. 
An example of a path that is blocked when the +d-separating set is empty is: -- u -> w -> ... -> z <- v +- u -> w -> ... -> n <- v -because 'z' is a collider in this path and 'z' is not in the d-separating set. However, -if 'z' or a descendant of 'z' is included in the d-separating set, then the path through -the collider at 'z' (... -> z <- ...) is now "open". +The node ``n`` is a collider in this path and is not in the d-separating set. +So ``n`` blocks this path. However, if ``n`` or a descendant of ``n`` is +included in the d-separating set, then the path through the collider +at ``n`` (... -> n <- ...) is "open". -D-separation is concerned with blocking all paths between u and v. Therefore, a -d-separating set between ``u`` and ``v`` is one where all paths are blocked. +D-separation is concerned with blocking all paths between nodes from ``x`` to ``y``. +A d-separating set between ``x`` and ``y`` is one where all paths are blocked. D-separation and its applications in probability ------------------------------------------------ -D-separation is commonly used in probabilistic graphical models. D-separation +D-separation is commonly used in probabilistic causal-graph models. D-separation connects the idea of probabilistic "dependence" with separation in a graph. If -one assumes the causal Markov condition [5]_, then d-separation implies conditional -independence in probability distributions. +one assumes the causal Markov condition [5]_, (every node is conditionally +independent of its non-descendants, given its parents) then d-separation implies +conditional independence in probability distributions. +Symmetrically, d-connection implies dependence. + +The intuition is as follows. The edges on a causal graph indicate which nodes +influence the outcome of other nodes directly. An edge from u to v +implies that the outcome of event ``u`` influences the probabilities for +the outcome of event ``v``. Certainly knowing ``u`` changes predictions for ``v``. 
+But also knowing ``v`` changes predictions for ``u``. The outcomes are dependent. +Furthermore, an edge from ``v`` to ``w`` would mean that ``w`` and ``v`` are dependent +and thus that ``u`` could indirectly influence ``w``. + +Without any knowledge about the system (candidate d-separating set is empty) +a causal graph ``u -> v -> w`` allows all three nodes to be dependent. But +if we know the outcome of ``v``, the conditional probabilities of outcomes for +``u`` and ``w`` are independent of each other. That is, once we know the outcome +for ``v``, the probabilities for ``w`` do not depend on the outcome for ``u``. +This is the idea behind ``v`` blocking the path if it is "known" (in the candidate +d-separating set). + +The same argument works whether the direction of the edges are both +left-going and when both arrows head out from the middle. Having a "known" +node on a path blocks the collider-free path because those relationships +make the conditional probabilities independent. + +The direction of the causal edges does impact dependence precisely in the +case of a collider e.g. ``u -> v <- w``. In that situation, both ``u`` and ``w`` +influence ``v``. But they do not directly influence each other. So without any +knowledge of any outcomes, ``u`` and ``w`` are independent. That is the idea behind +colliders blocking the path. But, if ``v`` is known, the conditional probabilities +of ``u`` and ``w`` can be dependent. This is the heart of Berkson's Paradox [6]_. +For example, suppose ``u`` and ``w`` are boolean events (they either happen or do not) +and ``v`` represents the outcome "at least one of ``u`` and ``w`` occur". Then knowing +``v`` is true makes the conditional probabilities of ``u`` and ``w`` dependent. +Essentially, knowing that at least one of them is true raises the probability of +each. But further knowledge that ``w`` is true (or false) changes the conditional +probability of ``u`` to either the original value or 1. 
So the conditional +probability of ``u`` depends on the outcome of ``w`` even though there is no +causal relationship between them. When a collider is known, dependence can +occur across paths through that collider. This is the reason open colliders +do not block paths. + +Furthermore, even if ``v`` is not "known", if one of its descendants is "known" +we can use that information to know more about ``v`` which again makes +``u`` and ``w`` potentially dependent. Suppose the chance of ``n`` occurring +is much higher when ``v`` occurs ("at least one of ``u`` and ``w`` occur"). +Then if we know ``n`` occurred, it is more likely that ``v`` occurred and that +makes the chance of ``u`` and ``w`` dependent. This is the idea behind why +a collider does not block a path if any descendant of the collider is "known". + +When two sets of nodes ``x`` and ``y`` are d-separated by a set ``z``, +it means that given the outcomes of the nodes in ``z``, the probabilities +of outcomes of the nodes in ``x`` are independent of the outcomes of the +nodes in ``y`` and vice versa. Examples -------- - ->>> ->>> # HMM graph with five states and observation nodes -... g = nx.DiGraph() ->>> g.add_edges_from( +A Hidden Markov Model with 5 observed states and 5 hidden states +where the hidden states have causal relationships resulting in +a path results in the following causal network. We check that +early states along the path are separated from late state in +the path by the d-separator of the middle hidden state. +Thus if we condition on the middle hidden state, the early +state probabilities are independent of the late state outcomes. + +>>> G = nx.DiGraph() +>>> G.add_edges_from( ... [ -... ("S1", "S2"), -... ("S2", "S3"), -... ("S3", "S4"), -... ("S4", "S5"), -... ("S1", "O1"), -... ("S2", "O2"), -... ("S3", "O3"), -... ("S4", "O4"), -... ("S5", "O5"), +... ("H1", "H2"), +... ("H2", "H3"), +... ("H3", "H4"), +... ("H4", "H5"), +... ("H1", "O1"), +... ("H2", "O2"), +... ("H3", "O3"), +... 
("H4", "O4"), +... ("H5", "O5"), ... ] ... ) ->>> ->>> # states/obs before 'S3' are d-separated from states/obs after 'S3' -... nx.d_separated(g, {"S1", "S2", "O1", "O2"}, {"S4", "S5", "O4", "O5"}, {"S3"}) +>>> x, y, z = ({"H1", "O1"}, {"H5", "O5"}, {"H3"}) +>>> nx.is_d_separator(G, x, y, z) +True +>>> nx.is_minimal_d_separator(G, x, y, z) +True +>>> nx.is_minimal_d_separator(G, x, y, z | {"O3"}) +False +>>> z = nx.find_minimal_d_separator(G, x | y, {"O2", "O3", "O4"}) +>>> z == {"H2", "H4"} +True + +If no minimal_d_separator exists, `None` is returned + +>>> other_z = nx.find_minimal_d_separator(G, x | y, {"H2", "H3"}) +>>> other_z is None True @@ -101,142 +196,192 @@ .. [1] Pearl, J. (2009). Causality. Cambridge: Cambridge University Press. -.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks. +.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks. Cambridge: Cambridge University Press. -.. [3] Shachter, R. D. (1998). - Bayes-ball: rational pastime (for determining irrelevance and requisite - information in belief networks and influence diagrams). - In , Proceedings of the Fourteenth Conference on Uncertainty in Artificial - Intelligence (pp. 480–487). - San Francisco, CA, USA: Morgan Kaufmann Publishers Inc. +.. [3] Shachter, Ross D. "Bayes-ball: The rational pastime (for + determining irrelevance and requisite information in belief networks + and influence diagrams)." In Proceedings of the Fourteenth Conference + on Uncertainty in Artificial Intelligence (UAI), (pp. 480–487). 1998. .. [4] Koller, D., & Friedman, N. (2009). Probabilistic graphical models: principles and techniques. The MIT Press. .. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition +.. 
[6] https://en.wikipedia.org/wiki/Berkson%27s_paradox + """ from collections import deque +from itertools import chain import networkx as nx from networkx.utils import UnionFind, not_implemented_for -__all__ = ["d_separated", "minimal_d_separator", "is_minimal_d_separator"] +__all__ = [ + "is_d_separator", + "is_minimal_d_separator", + "find_minimal_d_separator", + "d_separated", + "minimal_d_separator", +] @not_implemented_for("undirected") @nx._dispatch -def d_separated(G, x, y, z): - """ - Return whether node sets ``x`` and ``y`` are d-separated by ``z``. +def is_d_separator(G, x, y, z): + """Return whether node sets `x` and `y` are d-separated by `z`. Parameters ---------- - G : graph + G : nx.DiGraph A NetworkX DAG. - x : set - First set of nodes in ``G``. + x : node or set of nodes + First node or set of nodes in `G`. - y : set - Second set of nodes in ``G``. + y : node or set of nodes + Second node or set of nodes in `G`. - z : set - Set of conditioning nodes in ``G``. Can be empty set. + z : node or set of nodes + Potential separator (set of conditioning nodes in `G`). Can be empty set. Returns ------- b : bool - A boolean that is true if ``x`` is d-separated from ``y`` given ``z`` in ``G``. + A boolean that is true if `x` is d-separated from `y` given `z` in `G`. Raises ------ NetworkXError - The *d-separation* test is commonly used with directed - graphical models which are acyclic. Accordingly, the algorithm - raises a :exc:`NetworkXError` if the input graph is not a DAG. + The *d-separation* test is commonly used on disjoint sets of + nodes in acyclic directed graphs. Accordingly, the algorithm + raises a :exc:`NetworkXError` if the node sets are not + disjoint or if the input graph is not a DAG. NodeNotFound If any of the input nodes are not found in the graph, - a :exc:`NodeNotFound` exception is raised. + a :exc:`NodeNotFound` exception is raised Notes ----- A d-separating set in a DAG is a set of nodes that blocks all paths between the two sets. 
Nodes in `z` block a path if they are part of the path and are not a collider, - or a descendant of a collider. A collider structure along a path + or a descendant of a collider. Also colliders that are not in `z` + block a path. A collider structure along a path is ``... -> c <- ...`` where ``c`` is the collider node. https://en.wikipedia.org/wiki/Bayesian_network#d-separation """ + try: + x = {x} if x in G else x + y = {y} if y in G else y + z = {z} if z in G else z + + intersection = x & y or x & z or y & z + if intersection: + raise nx.NetworkXError( + f"The sets are not disjoint, with intersection {intersection}" + ) + + set_v = x | y | z + if set_v - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound("One of x, y, or z is not a node or a set of nodes in G") if not nx.is_directed_acyclic_graph(G): raise nx.NetworkXError("graph should be directed acyclic") - union_xyz = x.union(y).union(z) - - if any(n not in G.nodes for n in union_xyz): - raise nx.NodeNotFound("one or more specified nodes not found in the graph") - - G_copy = G.copy() - - # transform the graph by removing leaves that are not in x | y | z - # until no more leaves can be removed. - leaves = deque([n for n in G_copy.nodes if G_copy.out_degree[n] == 0]) - while len(leaves) > 0: - leaf = leaves.popleft() - if leaf not in union_xyz: - for p in G_copy.predecessors(leaf): - if G_copy.out_degree[p] == 1: - leaves.append(p) - G_copy.remove_node(leaf) - - # transform the graph by removing outgoing edges from the - # conditioning set. - edges_to_remove = list(G_copy.out_edges(z)) - G_copy.remove_edges_from(edges_to_remove) - - # use disjoint-set data structure to check if any node in `x` - # occurs in the same weakly connected component as a node in `y`. 
- disjoint_set = UnionFind(G_copy.nodes()) - for component in nx.weakly_connected_components(G_copy): - disjoint_set.union(*component) - disjoint_set.union(*x) - disjoint_set.union(*y) - - if x and y and disjoint_set[next(iter(x))] == disjoint_set[next(iter(y))]: - return False - else: - return True + # contains -> and <-> edges from starting node T + forward_deque = deque([]) + forward_visited = set() + + # contains <- and - edges from starting node T + backward_deque = deque(x) + backward_visited = set() + + ancestors_or_z = set().union(*[nx.ancestors(G, node) for node in x]) | z | x + + while forward_deque or backward_deque: + if backward_deque: + node = backward_deque.popleft() + backward_visited.add(node) + if node in y: + return False + if node in z: + continue + + # add <- edges to backward deque + backward_deque.extend(G.pred[node].keys() - backward_visited) + # add -> edges to forward deque + forward_deque.extend(G.succ[node].keys() - forward_visited) + + if forward_deque: + node = forward_deque.popleft() + forward_visited.add(node) + if node in y: + return False + + # Consider if -> node <- is opened due to ancestor of node in z + if node in ancestors_or_z: + # add <- edges to backward deque + backward_deque.extend(G.pred[node].keys() - backward_visited) + if node not in z: + # add -> edges to forward deque + forward_deque.extend(G.succ[node].keys() - forward_visited) + + return True @not_implemented_for("undirected") @nx._dispatch -def minimal_d_separator(G, u, v): - """Compute a minimal d-separating set between 'u' and 'v'. +def find_minimal_d_separator(G, x, y, *, included=None, restricted=None): + """Returns a minimal d-separating set between `x` and `y` if possible - A d-separating set in a DAG is a set of nodes that blocks all paths - between the two nodes, 'u' and 'v'. This function - constructs a d-separating set that is "minimal", meaning it is the smallest - d-separating set for 'u' and 'v'. This is not necessarily - unique. 
For more details, see Notes. + A d-separating set in a DAG is a set of nodes that blocks all + paths between the two sets of nodes, `x` and `y`. This function + constructs a d-separating set that is "minimal", meaning no nodes can + be removed without it losing the d-separating property for `x` and `y`. + If no d-separating sets exist for `x` and `y`, this returns `None`. + + In a DAG there may be more than one minimal d-separator between two + sets of nodes. Minimal d-separators are not always unique. This function + returns one minimal d-separator, or `None` if no d-separator exists. + + Uses the algorithm presented in [1]_. The complexity of the algorithm + is :math:`O(m)`, where :math:`m` stands for the number of edges in + the subgraph of G consisting of only the ancestors of `x` and `y`. + For full details, see [1]_. Parameters ---------- G : graph A networkx DAG. - u : node - A node in the graph, G. - v : node - A node in the graph, G. + x : set | node + A node or set of nodes in the graph. + y : set | node + A node or set of nodes in the graph. + included : set | node | None + A node or set of nodes which must be included in the found separating set, + default is None, which means the empty set. + restricted : set | node | None + Restricted node or set of nodes to consider. Only these nodes can be in + the found separating set, default is None meaning all nodes in ``G``. + + Returns + ------- + z : set | None + The minimal d-separating set, if at least one d-separating set exists, + otherwise None. Raises ------ NetworkXError - Raises a :exc:`NetworkXError` if the input graph is not a DAG. + Raises a :exc:`NetworkXError` if the input graph is not a DAG + or if node sets `x`, `y`, and `included` are not disjoint. NodeNotFound If any of the input nodes are not found in the graph, @@ -244,89 +389,98 @@ def minimal_d_separator(G, u, v): References ---------- - .. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators. 
- - Notes - ----- - This function only finds ``a`` minimal d-separator. It does not guarantee - uniqueness, since in a DAG there may be more than one minimal d-separator - between two nodes. Moreover, this only checks for minimal separators - between two nodes, not two sets. Finding minimal d-separators between - two sets of nodes is not supported. - - Uses the algorithm presented in [1]_. The complexity of the algorithm - is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the - number of edges in the moralized graph of the sub-graph consisting - of only the ancestors of 'u' and 'v'. For full details, see [1]_. - - The algorithm works by constructing the moral graph consisting of just - the ancestors of `u` and `v`. Then it constructs a candidate for - a separating set ``Z'`` from the predecessors of `u` and `v`. - Then BFS is run starting from `u` and marking nodes - found from ``Z'`` and calling those nodes ``Z''``. - Then BFS is run again starting from `v` and marking nodes if they are - present in ``Z''``. Those marked nodes are the returned minimal - d-separating set. - - https://en.wikipedia.org/wiki/Bayesian_network#d-separation + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. 
""" if not nx.is_directed_acyclic_graph(G): raise nx.NetworkXError("graph should be directed acyclic") - union_uv = {u, v} + try: + x = {x} if x in G else x + y = {y} if y in G else y - if any(n not in G.nodes for n in union_uv): - raise nx.NodeNotFound("one or more specified nodes not found in the graph") + if included is None: + included = set() + elif included in G: + included = {included} - # first construct the set of ancestors of X and Y - x_anc = nx.ancestors(G, u) - y_anc = nx.ancestors(G, v) - D_anc_xy = x_anc.union(y_anc) - D_anc_xy.update((u, v)) + if restricted is None: + restricted = set(G) + elif restricted in G: + restricted = {restricted} - # second, construct the moralization of the subgraph of Anc(X,Y) - moral_G = nx.moral_graph(G.subgraph(D_anc_xy)) + set_y = x | y | included | restricted + if set_y - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound( + "One of x, y, included or restricted is not a node or set of nodes in G" + ) - # find a separating set Z' in moral_G - Z_prime = set(G.predecessors(u)).union(set(G.predecessors(v))) + if not included <= restricted: + raise nx.NetworkXError( + f"Included nodes {included} must be in restricted nodes {restricted}" + ) - # perform BFS on the graph from 'x' to mark - Z_dprime = _bfs_with_marks(moral_G, u, Z_prime) - Z = _bfs_with_marks(moral_G, v, Z_dprime) - return Z + intersection = x & y or x & included or y & included + if intersection: + raise nx.NetworkXError( + f"The sets x, y, included are not disjoint. 
Overlap: {intersection}" + ) + + nodeset = x | y | included + ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, node) for node in nodeset]) + + z_init = restricted & (ancestors_x_y_included - (x | y)) + + x_closure = _reachable(G, x, ancestors_x_y_included, z_init) + if x_closure & y: + return None + + z_updated = z_init & (x_closure | included) + y_closure = _reachable(G, y, ancestors_x_y_included, z_updated) + return z_updated & (y_closure | included) @not_implemented_for("undirected") @nx._dispatch -def is_minimal_d_separator(G, u, v, z): - """Determine if a d-separating set is minimal. +def is_minimal_d_separator(G, x, y, z, *, included=None, restricted=None): + """Determine if `z` is a minimal d-separator for `x` and `y`. - A d-separating set, `z`, in a DAG is a set of nodes that blocks - all paths between the two nodes, `u` and `v`. This function - verifies that a set is "minimal", meaning there is no smaller - d-separating set between the two nodes. + A d-separator, `z`, in a DAG is a set of nodes that blocks + all paths from nodes in set `x` to nodes in set `y`. + A minimal d-separator is a d-separator `z` such that removing + any subset of nodes makes it no longer a d-separator. - Note: This function checks whether `z` is a d-separator AND is minimal. - One can use the function `d_separated` to only check if `z` is a d-separator. - See examples below. + Note: This function checks whether `z` is a d-separator AND is + minimal. One can use the function `is_d_separator` to only check if + `z` is a d-separator. See examples below. Parameters ---------- G : nx.DiGraph - The graph. - u : node - A node in the graph. - v : node - A node in the graph. - z : Set of nodes - The set of nodes to check if it is a minimal d-separating set. - The function :func:`d_separated` is called inside this function + A NetworkX DAG. + x : node | set + A node or set of nodes in the graph. + y : node | set + A node or set of nodes in the graph. 
+ z : node | set + The node or set of nodes to check if it is a minimal d-separating set. + The function :func:`is_d_separator` is called inside this function to verify that `z` is in fact a d-separator. + included : set | node | None + A node or set of nodes which must be included in the found separating set, + default is ``None``, which means the empty set. + restricted : set | node | None + Restricted node or set of nodes to consider. Only these nodes can be in + the found separating set, default is ``None`` meaning all nodes in ``G``. Returns ------- bool - Whether or not the set `z` is a d-separator and is also minimal. + Whether or not the set `z` is a minimal d-separator subject to + `restricted` nodes and `included` node constraints. Examples -------- @@ -338,7 +492,7 @@ def is_minimal_d_separator(G, u, v, z): >>> nx.is_minimal_d_separator(G, 0, 2, {1, 3, 4}) False >>> # alternatively, if we only want to check that {1, 3, 4} is a d-separator - >>> nx.d_separated(G, {0}, {4}, {1, 3, 4}) + >>> nx.is_d_separator(G, 0, 2, {1, 3, 4}) True Raises @@ -352,106 +506,217 @@ def is_minimal_d_separator(G, u, v, z): References ---------- - .. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators. + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. Notes ----- - This function only works on verifying a d-separating set is minimal - between two nodes. To verify that a d-separating set is minimal between - two sets of nodes is not supported. - - Uses algorithm 2 presented in [1]_. The complexity of the algorithm - is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the - number of edges in the moralized graph of the sub-graph consisting - of only the ancestors of ``u`` and ``v``. - - The algorithm works by constructing the moral graph consisting of just - the ancestors of `u` and `v`. 
First, it performs BFS on the moral graph - starting from `u` and marking any nodes it encounters that are part of - the separating set, `z`. If a node is marked, then it does not continue - along that path. In the second stage, BFS with markings is repeated on the - moral graph starting from `v`. If at any stage, any node in `z` is - not marked, then `z` is considered not minimal. If the end of the algorithm - is reached, then `z` is minimal. + This function works on verifying that a set is minimal and + d-separating between two nodes. Uses criterion (a), (b), (c) on + page 4 of [1]_. a) closure(`x`) and `y` are disjoint. b) `z` contains + all nodes from `included` and is contained in the `restricted` + nodes and in the union of ancestors of `x`, `y`, and `included`. + c) the nodes in `z` not in `included` are contained in both + closure(x) and closure(y). The closure of a set is the set of nodes + connected to the set by a directed path in G. + + The complexity is :math:`O(m)`, where :math:`m` stands for the + number of edges in the subgraph of G consisting of only the + ancestors of `x` and `y`. For full details, see [1]_. 
-
-    https://en.wikipedia.org/wiki/Bayesian_network#d-separation
     """
-    if not nx.d_separated(G, {u}, {v}, z):
-        return False
-
-    x_anc = nx.ancestors(G, u)
-    y_anc = nx.ancestors(G, v)
-    xy_anc = x_anc.union(y_anc)
+    if not nx.is_directed_acyclic_graph(G):
+        raise nx.NetworkXError("graph should be directed acyclic")
 
-    # if Z contains any node which is not in ancestors of X or Y
-    # then it is definitely not minimal
-    if any(node not in xy_anc for node in z):
+    try:
+        x = {x} if x in G else x
+        y = {y} if y in G else y
+        z = {z} if z in G else z
+
+        if included is None:
+            included = set()
+        elif included in G:
+            included = {included}
+
+        if restricted is None:
+            restricted = set(G)
+        elif restricted in G:
+            restricted = {restricted}
+
+        set_y = x | y | included | restricted
+        if set_y - G.nodes:
+            raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G")
+    except TypeError:
+        raise nx.NodeNotFound(
+            "One of x, y, z, included or restricted is not a node or set of nodes in G"
+        )
+
+    if not included <= z:
+        raise nx.NetworkXError(
+            f"Included nodes {included} must be in proposed separating set z {z}"
+        )
+    if not z <= restricted:
+        raise nx.NetworkXError(
+            f"Separating set {z} must be contained in restricted set {restricted}"
+        )
+
+    intersection = x.intersection(y) or x.intersection(z) or y.intersection(z)
+    if intersection:
+        raise nx.NetworkXError(
+            f"The sets are not disjoint, with intersection {intersection}"
+        )
+
+    nodeset = x | y | included
+    ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, n) for n in nodeset])
+
+    # criterion (a) -- check that z is actually a separator
+    x_closure = _reachable(G, x, ancestors_x_y_included, z)
+    if x_closure & y:
         return False
 
-    D_anc_xy = x_anc.union(y_anc)
-    D_anc_xy.update((u, v))
-
-    # second, construct the moralization of the subgraph
-    moral_G = nx.moral_graph(G.subgraph(D_anc_xy))
-
-    # start BFS from X
-    marks = _bfs_with_marks(moral_G, u, z)
-
-    # if not all the Z is marked, then the set is 
not minimal - if any(node not in marks for node in z): + # criterion (b) -- basic constraint; included and restricted already checked above + if not (z <= ancestors_x_y_included): return False - # similarly, start BFS from Y and check the marks - marks = _bfs_with_marks(moral_G, v, z) - # if not all the Z is marked, then the set is not minimal - if any(node not in marks for node in z): + # criterion (c) -- check that z is minimal + y_closure = _reachable(G, y, ancestors_x_y_included, z) + if not ((z - included) <= (x_closure & y_closure)): return False - return True -@not_implemented_for("directed") -def _bfs_with_marks(G, start_node, check_set): - """Breadth-first-search with markings. +@not_implemented_for("undirected") +def _reachable(G, x, a, z): + """Modified Bayes-Ball algorithm for finding d-connected nodes. - Performs BFS starting from ``start_node`` and whenever a node - inside ``check_set`` is met, it is "marked". Once a node is marked, - BFS does not continue along that path. The resulting marked nodes - are returned. + Find all nodes in `a` that are d-connected to those in `x` by + those in `z`. This is an implementation of the function + `REACHABLE` in [1]_ (which is itself a modification of the + Bayes-Ball algorithm [2]_) when restricted to DAGs. Parameters ---------- - G : nx.Graph - An undirected graph. - start_node : node - The start of the BFS. - check_set : set - The set of nodes to check against. + G : nx.DiGraph + A NetworkX DAG. + x : node | set + A node in the DAG, or a set of nodes. + a : node | set + A (set of) node(s) in the DAG containing the ancestors of `x`. + z : node | set + The node or set of nodes conditioned on when checking d-connectedness. Returns ------- - marked : set - A set of nodes that were marked. + w : set + The closure of `x` in `a` with respect to d-connectedness + given `z`. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. 
"Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. + + .. [2] Shachter, Ross D. "Bayes-ball: The rational pastime + (for determining irrelevance and requisite information in + belief networks and influence diagrams)." In Proceedings of the + Fourteenth Conference on Uncertainty in Artificial Intelligence + (UAI), (pp. 480–487). 1998. + """ + + def _pass(e, v, f, n): + """Whether a ball entering node `v` along edge `e` passes to `n` along `f`. + + Boolean function defined on page 6 of [1]_. + + Parameters + ---------- + e : bool + Directed edge by which the ball got to node `v`; `True` iff directed into `v`. + v : node + Node where the ball is. + f : bool + Directed edge connecting nodes `v` and `n`; `True` iff directed `n`. + n : node + Checking whether the ball passes to this node. + + Returns + ------- + b : bool + Whether the ball passes or not. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. 
+ """ + is_element_of_A = n in a + # almost_definite_status = True # always true for DAGs; not so for RCGs + collider_if_in_Z = v not in z or (e and not f) + return is_element_of_A and collider_if_in_Z # and almost_definite_status + + queue = deque([]) + for node in x: + if bool(G.pred[node]): + queue.append((True, node)) + if bool(G.succ[node]): + queue.append((False, node)) + processed = queue.copy() + + while any(queue): + e, v = queue.popleft() + preds = ((False, n) for n in G.pred[v]) + succs = ((True, n) for n in G.succ[v]) + f_n_pairs = chain(preds, succs) + for f, n in f_n_pairs: + if (f, n) not in processed and _pass(e, v, f, n): + queue.append((f, n)) + processed.append((f, n)) + + return {w for (_, w) in processed} + + +# Deprecated functions: +def d_separated(G, x, y, z): + """Return whether nodes sets ``x`` and ``y`` are d-separated by ``z``. + + .. deprecated:: 3.3 + + This function is deprecated and will be removed in NetworkX v3.5. + Please use `is_d_separator(G, x, y, z)`. + + """ + import warnings + + warnings.warn( + "d_separated is deprecated and will be removed in NetworkX v3.5." + "Please use `is_d_separator(G, x, y, z)`.", + category=DeprecationWarning, + stacklevel=2, + ) + return nx.is_d_separator(G, x, y, z) + + +def minimal_d_separator(G, u, v): + """Returns a minimal_d-separating set between `x` and `y` if possible + + .. deprecated:: 3.3 + + minimal_d_separator is deprecated and will be removed in NetworkX v3.5. + Please use `find_minimal_d_separator(G, x, y)`. 
+ """ - visited = {} - marked = set() - queue = [] - - visited[start_node] = None - queue.append(start_node) - while queue: - m = queue.pop(0) - - for nbr in G.neighbors(m): - if nbr not in visited: - # memoize where we visited so far - visited[nbr] = None - - # mark the node in Z' and do not continue along that path - if nbr in check_set: - marked.add(nbr) - else: - queue.append(nbr) - return marked + import warnings + + warnings.warn( + ( + "This function is deprecated and will be removed in NetworkX v3.5." + "Please use `is_d_separator(G, x, y)`." + ), + category=DeprecationWarning, + stacklevel=2, + ) + return nx.find_minimal_d_separator(G, u, v) diff --git a/networkx/algorithms/tests/test_d_separation.py b/networkx/algorithms/tests/test_d_separation.py index a94d4dd4cfb..6f62971301b 100644 --- a/networkx/algorithms/tests/test_d_separation.py +++ b/networkx/algorithms/tests/test_d_separation.py @@ -81,6 +81,41 @@ def asia_graph_fixture(): return asia_graph() +@pytest.fixture() +def large_collider_graph(): + edge_list = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def chain_and_fork_graph(): + edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def no_separating_set_graph(): + edge_list = [("A", "B")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def large_no_separating_set_graph(): + edge_list = [("A", "B"), ("C", "A"), ("C", "B")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def collider_trek_graph(): + edge_list = [("A", "B"), ("C", "B"), ("C", "D")] + G = nx.DiGraph(edge_list) + return G + + @pytest.mark.parametrize( "graph", [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph()], @@ -90,40 +125,40 @@ def test_markov_condition(graph): for node in graph.nodes: parents = set(graph.predecessors(node)) non_descendants = graph.nodes - 
nx.descendants(graph, node) - {node} - parents - assert nx.d_separated(graph, {node}, non_descendants, parents) + assert nx.is_d_separator(graph, {node}, non_descendants, parents) def test_path_graph_dsep(path_graph): """Example-based test of d-separation for path_graph.""" - assert nx.d_separated(path_graph, {0}, {2}, {1}) - assert not nx.d_separated(path_graph, {0}, {2}, {}) + assert nx.is_d_separator(path_graph, {0}, {2}, {1}) + assert not nx.is_d_separator(path_graph, {0}, {2}, set()) def test_fork_graph_dsep(fork_graph): """Example-based test of d-separation for fork_graph.""" - assert nx.d_separated(fork_graph, {1}, {2}, {0}) - assert not nx.d_separated(fork_graph, {1}, {2}, {}) + assert nx.is_d_separator(fork_graph, {1}, {2}, {0}) + assert not nx.is_d_separator(fork_graph, {1}, {2}, set()) def test_collider_graph_dsep(collider_graph): """Example-based test of d-separation for collider_graph.""" - assert nx.d_separated(collider_graph, {0}, {1}, {}) - assert not nx.d_separated(collider_graph, {0}, {1}, {2}) + assert nx.is_d_separator(collider_graph, {0}, {1}, set()) + assert not nx.is_d_separator(collider_graph, {0}, {1}, {2}) def test_naive_bayes_dsep(naive_bayes_graph): """Example-based test of d-separation for naive_bayes_graph.""" for u, v in combinations(range(1, 5), 2): - assert nx.d_separated(naive_bayes_graph, {u}, {v}, {0}) - assert not nx.d_separated(naive_bayes_graph, {u}, {v}, {}) + assert nx.is_d_separator(naive_bayes_graph, {u}, {v}, {0}) + assert not nx.is_d_separator(naive_bayes_graph, {u}, {v}, set()) def test_asia_graph_dsep(asia_graph): """Example-based test of d-separation for asia_graph.""" - assert nx.d_separated( + assert nx.is_d_separator( asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, {"bronchitis", "either"} ) - assert nx.d_separated( + assert nx.is_d_separator( asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"} ) @@ -137,11 +172,11 @@ def test_undirected_graphs_are_not_supported(): """ g = 
nx.path_graph(3, nx.Graph) with pytest.raises(nx.NetworkXNotImplemented): - nx.d_separated(g, {0}, {1}, {2}) + nx.is_d_separator(g, {0}, {1}, {2}) with pytest.raises(nx.NetworkXNotImplemented): nx.is_minimal_d_separator(g, {0}, {1}, {2}) with pytest.raises(nx.NetworkXNotImplemented): - nx.minimal_d_separator(g, {0}, {1}) + nx.find_minimal_d_separator(g, {0}, {1}) def test_cyclic_graphs_raise_error(): @@ -152,60 +187,128 @@ def test_cyclic_graphs_raise_error(): """ g = nx.cycle_graph(3, nx.DiGraph) with pytest.raises(nx.NetworkXError): - nx.d_separated(g, {0}, {1}, {2}) + nx.is_d_separator(g, {0}, {1}, {2}) with pytest.raises(nx.NetworkXError): - nx.minimal_d_separator(g, 0, 1) + nx.find_minimal_d_separator(g, {0}, {1}) with pytest.raises(nx.NetworkXError): - nx.is_minimal_d_separator(g, 0, 1, {2}) + nx.is_minimal_d_separator(g, {0}, {1}, {2}) def test_invalid_nodes_raise_error(asia_graph): """ Test that graphs that have invalid nodes passed in raise errors. """ + # Check both set and node arguments + with pytest.raises(nx.NodeNotFound): + nx.is_d_separator(asia_graph, {0}, {1}, {2}) + with pytest.raises(nx.NodeNotFound): + nx.is_d_separator(asia_graph, 0, 1, 2) + with pytest.raises(nx.NodeNotFound): + nx.is_minimal_d_separator(asia_graph, {0}, {1}, {2}) with pytest.raises(nx.NodeNotFound): - nx.d_separated(asia_graph, {0}, {1}, {2}) + nx.is_minimal_d_separator(asia_graph, 0, 1, 2) with pytest.raises(nx.NodeNotFound): - nx.is_minimal_d_separator(asia_graph, 0, 1, {2}) + nx.find_minimal_d_separator(asia_graph, {0}, {1}) with pytest.raises(nx.NodeNotFound): - nx.minimal_d_separator(asia_graph, 0, 1) + nx.find_minimal_d_separator(asia_graph, 0, 1) -def test_minimal_d_separator(): +def test_nondisjoint_node_sets_raise_error(collider_graph): + """ + Test that error is raised when node sets aren't disjoint. 
+ """ + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 1, 0) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 2, 0) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 0, 1) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 1, 0, 0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 0, 0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 0, 1, included=0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 1, 0, included=0) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 0, 0, set()) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 0, 1, set(), included=0) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 1, 0, set(), included=0) + + +def test_is_minimal_d_separator( + large_collider_graph, + chain_and_fork_graph, + no_separating_set_graph, + large_no_separating_set_graph, + collider_trek_graph, +): # Case 1: # create a graph A -> B <- C # B -> D -> E; # B -> F; # G -> E; - edge_list = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")] - G = nx.DiGraph(edge_list) - assert not nx.d_separated(G, {"B"}, {"E"}, set()) + assert not nx.is_d_separator(large_collider_graph, {"B"}, {"E"}, set()) # minimal set of the corresponding graph # for B and E should be (D,) - Zmin = nx.minimal_d_separator(G, "B", "E") - - # the minimal separating set should pass the test for minimality - assert nx.is_minimal_d_separator(G, "B", "E", Zmin) + Zmin = nx.find_minimal_d_separator(large_collider_graph, "B", "E") + # check that the minimal d-separator is a d-separating set + assert nx.is_d_separator(large_collider_graph, "B", "E", Zmin) + # the minimal separating set should also pass the test for minimality + assert 
nx.is_minimal_d_separator(large_collider_graph, "B", "E", Zmin) + # function should also work with set arguments + assert nx.is_minimal_d_separator(large_collider_graph, {"A", "B"}, {"G", "E"}, Zmin) assert Zmin == {"D"} # Case 2: # create a graph A -> B -> C # B -> D -> C; - edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")] - G = nx.DiGraph(edge_list) - assert not nx.d_separated(G, {"A"}, {"C"}, set()) - Zmin = nx.minimal_d_separator(G, "A", "C") + assert not nx.is_d_separator(chain_and_fork_graph, {"A"}, {"C"}, set()) + Zmin = nx.find_minimal_d_separator(chain_and_fork_graph, "A", "C") # the minimal separating set should pass the test for minimality - assert nx.is_minimal_d_separator(G, "A", "C", Zmin) + assert nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Zmin) assert Zmin == {"B"} - Znotmin = Zmin.union({"D"}) - assert not nx.is_minimal_d_separator(G, "A", "C", Znotmin) + assert not nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Znotmin) + + # Case 3: + # create a graph A -> B + + # there is no m-separating set between A and B at all, so + # no minimal m-separating set can exist + assert not nx.is_d_separator(no_separating_set_graph, {"A"}, {"B"}, set()) + assert nx.find_minimal_d_separator(no_separating_set_graph, "A", "B") is None + + # Case 4: + # create a graph A -> B with A <- C -> B + + # there is no m-separating set between A and B at all, so + # no minimal m-separating set can exist + # however, the algorithm will initially propose C as a + # minimal (but invalid) separating set + assert not nx.is_d_separator(large_no_separating_set_graph, {"A"}, {"B"}, {"C"}) + assert nx.find_minimal_d_separator(large_no_separating_set_graph, "A", "B") is None + + # Test `included` and `excluded` args + # create graph A -> B <- C -> D + assert nx.find_minimal_d_separator(collider_trek_graph, "A", "D", included="B") == { + "B", + "C", + } + assert ( + nx.find_minimal_d_separator( + collider_trek_graph, "A", "D", included="B", 
restricted="B" + ) + is None + ) -def test_minimal_d_separator_checks_dsep(): +def test_is_minimal_d_separator_checks_dsep(): """Test that is_minimal_d_separator checks for d-separation as well.""" g = nx.DiGraph() g.add_edges_from( @@ -221,8 +324,25 @@ def test_minimal_d_separator_checks_dsep(): ] ) - assert not nx.d_separated(g, {"C"}, {"F"}, {"D"}) + assert not nx.is_d_separator(g, {"C"}, {"F"}, {"D"}) # since {'D'} and {} are not d-separators, we return false assert not nx.is_minimal_d_separator(g, "C", "F", {"D"}) - assert not nx.is_minimal_d_separator(g, "C", "F", {}) + assert not nx.is_minimal_d_separator(g, "C", "F", set()) + + +def test__reachable(large_collider_graph): + reachable = nx.algorithms.d_separation._reachable + g = large_collider_graph + x = {"F", "D"} + ancestors = {"A", "B", "C", "D", "F"} + assert reachable(g, x, ancestors, {"B"}) == {"B", "F", "D"} + assert reachable(g, x, ancestors, set()) == ancestors + + +def test_deprecations(): + G = nx.DiGraph([(0, 1), (1, 2)]) + with pytest.deprecated_call(): + nx.d_separated(G, 0, 2, {1}) + with pytest.deprecated_call(): + z = nx.minimal_d_separator(G, 0, 2) diff --git a/networkx/conftest.py b/networkx/conftest.py index 41f4789c330..9a25495d580 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -133,6 +133,12 @@ def set_warnings(): warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nrandom_triad" ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="minimal_d_separator" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="d_separated" + ) warnings.filterwarnings("ignore", category=DeprecationWarning, message="\n\nk_core") warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nk_shell" From 1168afca385dab8cbda8be3aa479a22581e94988 Mon Sep 17 00:00:00 2001 From: Navya Agarwal <82928853+navyagarwal@users.noreply.github.com> Date: Fri, 22 Dec 2023 20:05:16 +0530 Subject: [PATCH 019/129] Fix 
all_node_cuts output for complete graphs (#6558) * Fix for complete graphs case in all_node_cuts * Check node_connectivty in complete graph * test for broken cuts. add fix --------- Co-authored-by: Dan Schult --- networkx/algorithms/connectivity/kcutsets.py | 7 ++++--- .../connectivity/tests/test_kcutsets.py | 17 ++++++++++++----- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py index bc04ed18589..f2036faf1e1 100644 --- a/networkx/algorithms/connectivity/kcutsets.py +++ b/networkx/algorithms/connectivity/kcutsets.py @@ -93,10 +93,11 @@ def all_node_cuts(G, k=None, flow_func=None): # Address some corner cases first. # For complete Graphs + if nx.density(G) == 1: - for cut_set in combinations(G, len(G) - 1): - yield set(cut_set) + yield from () return + # Initialize data structures. # Keep track of the cuts already computed so we do not repeat them. seen = [] @@ -130,7 +131,7 @@ def all_node_cuts(G, k=None, flow_func=None): for x in X: # step 3: Compute local connectivity flow of x with all other # non adjacent nodes in G - non_adjacent = set(G) - X - set(G[x]) + non_adjacent = set(G) - {x} - set(G[x]) for v in non_adjacent: # step 4: compute maximum flow in an Even-Tarjan reduction H of G # and step 5: build the associated residual network R diff --git a/networkx/algorithms/connectivity/tests/test_kcutsets.py b/networkx/algorithms/connectivity/tests/test_kcutsets.py index d5b3b089e23..4b4b5494a87 100644 --- a/networkx/algorithms/connectivity/tests/test_kcutsets.py +++ b/networkx/algorithms/connectivity/tests/test_kcutsets.py @@ -259,8 +259,15 @@ def test_cycle_graph(): def test_complete_graph(): G = nx.complete_graph(5) - solution = [{0, 1, 2, 3}, {0, 1, 2, 4}, {0, 1, 3, 4}, {0, 2, 3, 4}, {1, 2, 3, 4}] - cuts = list(nx.all_node_cuts(G)) - assert len(solution) == len(cuts) - for cut in cuts: - assert cut in solution + assert nx.node_connectivity(G) == 4 + assert 
list(nx.all_node_cuts(G)) == [] + + +def test_all_node_cuts_simple_case(): + G = nx.complete_graph(5) + G.remove_edges_from([(0, 1), (3, 4)]) + expected = [{0, 1, 2}, {2, 3, 4}] + actual = list(nx.all_node_cuts(G)) + assert len(actual) == len(expected) + for cut in actual: + assert cut in expected From 0762b7f30d1e315dc8c10029a59cf73148446ea2 Mon Sep 17 00:00:00 2001 From: Sadra Barikbin Date: Sat, 30 Dec 2023 18:59:15 +0330 Subject: [PATCH 020/129] Fix a tiny typo in `structuralholes.py::local_constraint` docstring (#7198) Update structuralholes.py Fix a tiny typo in local_constraint docstring. --- networkx/algorithms/structuralholes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/networkx/algorithms/structuralholes.py b/networkx/algorithms/structuralholes.py index c676177b38e..6d4f33171dd 100644 --- a/networkx/algorithms/structuralholes.py +++ b/networkx/algorithms/structuralholes.py @@ -229,7 +229,7 @@ def local_constraint(G, u, v, weight=None): the node ``v`` in the graph ``G``. Formally, the *local constraint on u with respect to v*, denoted - $\ell(v)$, is defined by + $\ell(u, v)$, is defined by .. 
math:: From 6109e10c31f2c9beb7e3370cca26af6878acf222 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Sat, 30 Dec 2023 21:10:27 +0530 Subject: [PATCH 021/129] Added `subgraph_is_monomorphic` and `subgraph_monomorphisms_iter` in docs (#7197) issue7170 --- doc/reference/algorithms/isomorphism.vf2.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/reference/algorithms/isomorphism.vf2.rst b/doc/reference/algorithms/isomorphism.vf2.rst index a5c2ad4206d..95b62de661a 100644 --- a/doc/reference/algorithms/isomorphism.vf2.rst +++ b/doc/reference/algorithms/isomorphism.vf2.rst @@ -17,8 +17,10 @@ Graph Matcher GraphMatcher.initialize GraphMatcher.is_isomorphic GraphMatcher.subgraph_is_isomorphic + GraphMatcher.subgraph_is_monomorphic GraphMatcher.isomorphisms_iter GraphMatcher.subgraph_isomorphisms_iter + GraphMatcher.subgraph_monomorphisms_iter GraphMatcher.candidate_pairs_iter GraphMatcher.match GraphMatcher.semantic_feasibility @@ -36,8 +38,10 @@ DiGraph Matcher DiGraphMatcher.initialize DiGraphMatcher.is_isomorphic DiGraphMatcher.subgraph_is_isomorphic + DiGraphMatcher.subgraph_is_monomorphic DiGraphMatcher.isomorphisms_iter DiGraphMatcher.subgraph_isomorphisms_iter + DiGraphMatcher.subgraph_monomorphisms_iter DiGraphMatcher.candidate_pairs_iter DiGraphMatcher.match DiGraphMatcher.semantic_feasibility From e755ea77eef9a6325d8f3edfbea959a3dd96ae72 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Tue, 2 Jan 2024 07:06:36 -0600 Subject: [PATCH 022/129] Fix online docs for `_dispatch` (#7194) * Fix online docs for `_dispatch` * whatcha talkin bout mypy? 
--- networkx/utils/backends.py | 121 +++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 58 deletions(-) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index c2d5661e034..57935640198 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -146,64 +146,6 @@ def _load_backend(backend_name): class _dispatch: - """Dispatches to a backend algorithm based on input graph types. - - Parameters - ---------- - func : function - - name : str, optional - The name of the algorithm to use for dispatching. If not provided, - the name of ``func`` will be used. ``name`` is useful to avoid name - conflicts, as all dispatched algorithms live in a single namespace. - - graphs : str or dict or None, default "G" - If a string, the parameter name of the graph, which must be the first - argument of the wrapped function. If more than one graph is required - for the algorithm (or if the graph is not the first argument), provide - a dict of parameter name to argument position for each graph argument. - For example, ``@_dispatch(graphs={"G": 0, "auxiliary?": 4})`` - indicates the 0th parameter ``G`` of the function is a required graph, - and the 4th parameter ``auxiliary`` is an optional graph. - To indicate an argument is a list of graphs, do e.g. ``"[graphs]"``. - Use ``graphs=None`` if *no* arguments are NetworkX graphs such as for - graph generators, readers, and conversion functions. - - edge_attrs : str or dict, optional - ``edge_attrs`` holds information about edge attribute arguments - and default values for those edge attributes. - If a string, ``edge_attrs`` holds the function argument name that - indicates a single edge attribute to include in the converted graph. - The default value for this attribute is 1. To indicate that an argument - is a list of attributes (all with default value 1), use e.g. ``"[attrs]"``. 
- If a dict, ``edge_attrs`` holds a dict keyed by argument names, with - values that are either the default value or, if a string, the argument - name that indicates the default value. - - node_attrs : str or dict, optional - Like ``edge_attrs``, but for node attributes. - - preserve_edge_attrs : bool or str or dict, optional - For bool, whether to preserve all edge attributes. - For str, the parameter name that may indicate (with ``True`` or a - callable argument) whether all edge attributes should be preserved - when converting. - For dict of ``{graph_name: {attr: default}}``, indicate pre-determined - edge attributes (and defaults) to preserve for input graphs. - - preserve_node_attrs : bool or str or dict, optional - Like ``preserve_edge_attrs``, but for node attributes. - - preserve_graph_attrs : bool or set - For bool, whether to preserve all graph attributes. - For set, which input graph arguments to preserve graph attributes. - - preserve_all_attrs : bool - Whether to preserve all edge, node and graph attributes. - This overrides all the other preserve_*_attrs. - - """ - # Allow any of the following decorator forms: # - @_dispatch # - @_dispatch() @@ -238,6 +180,63 @@ def __new__( preserve_graph_attrs=False, preserve_all_attrs=False, ): + """Dispatches to a backend algorithm based on input graph types. + + Parameters + ---------- + func : function + + name : str, optional + The name of the algorithm to use for dispatching. If not provided, + the name of ``func`` will be used. ``name`` is useful to avoid name + conflicts, as all dispatched algorithms live in a single namespace. + + graphs : str or dict or None, default "G" + If a string, the parameter name of the graph, which must be the first + argument of the wrapped function. If more than one graph is required + for the algorithm (or if the graph is not the first argument), provide + a dict of parameter name to argument position for each graph argument. 
+ For example, ``@_dispatch(graphs={"G": 0, "auxiliary?": 4})`` + indicates the 0th parameter ``G`` of the function is a required graph, + and the 4th parameter ``auxiliary`` is an optional graph. + To indicate an argument is a list of graphs, do e.g. ``"[graphs]"``. + Use ``graphs=None`` if *no* arguments are NetworkX graphs such as for + graph generators, readers, and conversion functions. + + edge_attrs : str or dict, optional + ``edge_attrs`` holds information about edge attribute arguments + and default values for those edge attributes. + If a string, ``edge_attrs`` holds the function argument name that + indicates a single edge attribute to include in the converted graph. + The default value for this attribute is 1. To indicate that an argument + is a list of attributes (all with default value 1), use e.g. ``"[attrs]"``. + If a dict, ``edge_attrs`` holds a dict keyed by argument names, with + values that are either the default value or, if a string, the argument + name that indicates the default value. + + node_attrs : str or dict, optional + Like ``edge_attrs``, but for node attributes. + + preserve_edge_attrs : bool or str or dict, optional + For bool, whether to preserve all edge attributes. + For str, the parameter name that may indicate (with ``True`` or a + callable argument) whether all edge attributes should be preserved + when converting. + For dict of ``{graph_name: {attr: default}}``, indicate pre-determined + edge attributes (and defaults) to preserve for input graphs. + + preserve_node_attrs : bool or str or dict, optional + Like ``preserve_edge_attrs``, but for node attributes. + + preserve_graph_attrs : bool or set + For bool, whether to preserve all graph attributes. + For set, which input graph arguments to preserve graph attributes. + + preserve_all_attrs : bool + Whether to preserve all edge, node and graph attributes. + This overrides all the other preserve_*_attrs. 
+ + """ if func is None: return partial( _dispatch, @@ -1016,3 +1015,9 @@ def _dispatch(func=None, **kwargs): # type: ignore[no-redef] dispatched_func = _orig_dispatch(func, **kwargs) func.__doc__ = dispatched_func.__doc__ return func + + _dispatch.__doc__ = _orig_dispatch.__new__.__doc__ # type: ignore[method-assign,assignment] + _sig = inspect.signature(_orig_dispatch.__new__) + _dispatch.__signature__ = _sig.replace( # type: ignore[method-assign,assignment] + parameters=[v for k, v in _sig.parameters.items() if k != "cls"] + ) From 3a98973a942066ffaa300fba1d884d92eaee8128 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Tue, 2 Jan 2024 07:07:51 -0600 Subject: [PATCH 023/129] Remove `"networkx.plugins"` and `"networkx.plugin_info"` entry-points (#7192) Use `"networkx.backends"` and `"networkx.backend_info"` instead. Version 3.2 supported both options. The next release can drop "plugin". --- networkx/utils/backends.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 57935640198..6c0e5649bd3 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -124,12 +124,8 @@ def _get_backends(group, *, load_and_call=False): return rv -# Rename "plugin" to "backend", and give backends a release cycle to update. 
-backends = _get_backends("networkx.plugins") -backend_info = _get_backends("networkx.plugin_info", load_and_call=True) - -backends.update(_get_backends("networkx.backends")) -backend_info.update(_get_backends("networkx.backend_info", load_and_call=True)) +backends = _get_backends("networkx.backends") +backend_info = _get_backends("networkx.backend_info", load_and_call=True) # Load and cache backends on-demand _loaded_backends = {} # type: ignore[var-annotated] From ad92646029cee542bf6a9313fd7b093302a49aa2 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:42:14 +0530 Subject: [PATCH 024/129] DOC : Updated docs for panther_similarity (#7175) * DOC : Updated panther_similarity * style fix * removed eg * Update similarity.py --- networkx/algorithms/similarity.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 0ad6de6e6fe..ec222d23d8a 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -1532,7 +1532,7 @@ def panther_similarity( source : node Source node for which to find the top `k` similar other nodes k : int (default = 5) - The number of most similar nodes to return + The number of most similar nodes to return. path_length : int (default = 5) How long the randomly generated paths should be (``T`` in [1]_) c : float (default = 0.5) @@ -1554,7 +1554,8 @@ def panther_similarity( similarity : dictionary Dictionary of nodes to similarity scores (as floats). Note: the self-similarity (i.e., ``v``) will not be included in - the returned dictionary. + the returned dictionary. So, for ``k = 5``, a dictionary of + top 4 nodes and their similarity scores will be returned. 
Raises ------- From 62288244a7356db0d7def7bd532d05457f567301 Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Tue, 2 Jan 2024 07:13:26 -0600 Subject: [PATCH 025/129] Fix warnings when building docs (#7195) --- doc/developer/projects.rst | 2 +- .../algorithms/community/label_propagation.py | 18 +++++++++--------- networkx/algorithms/distance_measures.py | 2 +- networkx/algorithms/similarity.py | 6 +++--- networkx/classes/function.py | 2 +- networkx/generators/expanders.py | 4 ++-- 6 files changed, 17 insertions(+), 17 deletions(-) diff --git a/doc/developer/projects.rst b/doc/developer/projects.rst index 623a9986b8b..fbd3b4e2c51 100644 --- a/doc/developer/projects.rst +++ b/doc/developer/projects.rst @@ -89,7 +89,7 @@ Centrality Atlas of network centrality measures. Centrality is a central concept in network science and has many applications across domains. NetworkX provides many functions for measuring - various types of :doc:`network centrality`. + various types of :doc:`network centrality`. The individual centrality functions are typically well-described by their docstrings (though there's always room for improvement!); however, there currently is no big-picture overview of centrality. diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py index 5d799d3de5c..307dfbe3a42 100644 --- a/networkx/algorithms/community/label_propagation.py +++ b/networkx/algorithms/community/label_propagation.py @@ -35,21 +35,21 @@ def fast_label_propagation_communities(G, *, weight=None, seed=None): Parameters ---------- G : Graph, DiGraph, MultiGraph, or MultiDiGraph - Any NetworkX graph. + Any NetworkX graph. weight : string, or None (default) - The edge attribute representing a non-negative weight of an edge. If None, - each edge is assumed to have weight one. 
The weight of an edge is used in - determining the frequency with which a label appears among the neighbors of - a node (edge with weight `w` is equivalent to `w` unweighted edges). + The edge attribute representing a non-negative weight of an edge. If None, + each edge is assumed to have weight one. The weight of an edge is used in + determining the frequency with which a label appears among the neighbors of + a node (edge with weight `w` is equivalent to `w` unweighted edges). seed : integer, random_state, or None (default) - Indicator of random number generation state. See :ref:`Randomness`. + Indicator of random number generation state. See :ref:`Randomness`. Returns ------- communities : iterable - Iterable of communities given as sets of nodes. + Iterable of communities given as sets of nodes. Notes ----- @@ -59,8 +59,8 @@ def fast_label_propagation_communities(G, *, weight=None, seed=None): References ---------- .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by - fast label propagation." Scientific Reports 13 (2023): 2701. - https://doi.org/10.1038/s41598-023-29610-z + fast label propagation." Scientific Reports 13 (2023): 2701. + https://doi.org/10.1038/s41598-023-29610-z """ # Queue of nodes to be processed. diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 04f88b17b6a..844e85f335b 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -798,7 +798,7 @@ def effective_graph_resistance(G, weight=None, invert_weight=True): The effective graph resistance of `G`. Raises - ------- + ------ NetworkXNotImplemented If `G` is a directed graph. diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index ec222d23d8a..3a1cfff8147 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -1276,7 +1276,7 @@ def simrank(G, u, v): the similarity value for the given pair of nodes. 
Raises - ------- + ------ ExceededMaxIterations If the algorithm does not converge within ``max_iterations``. @@ -1558,7 +1558,7 @@ def panther_similarity( top 4 nodes and their similarity scores will be returned. Raises - ------- + ------ NetworkXUnfeasible If `source` is an isolated node. @@ -1566,7 +1566,7 @@ def panther_similarity( If `source` is not in `G`. Notes - ------- + ----- The isolated nodes in `G` are ignored. Examples diff --git a/networkx/classes/function.py b/networkx/classes/function.py index 3f40eab1583..2746d56d715 100644 --- a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -80,7 +80,7 @@ def degree(G, nbunch=None, weight=None): def neighbors(G, n): """Returns an iterator over all neighbors of node n. - This function wraps the :func:``G.neighbors ` function. + This function wraps the :func:`G.neighbors ` function. """ return G.neighbors(n) diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index efac382c35f..2385682be58 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -340,7 +340,7 @@ def is_regular_expander(G, *, epsilon=0): the Alon-Boppana bound and given by $\lambda = 2 \sqrt{d - 1} + \epsilon$. [2]_ - In the case where $\epsilon = 0 $ then if the graph successfully passes the test + In the case where $\epsilon = 0$ then if the graph successfully passes the test it is a Ramanujan graph. [3]_ A Ramanujan graph has spectral gap almost as large as possible, which makes them @@ -404,7 +404,7 @@ def random_regular_expander_graph(n, d, *, epsilon=0, create_using=None, max_tri More precisely the returned graph is a $(n, d, \lambda)$-expander with $\lambda = 2 \sqrt{d - 1} + \epsilon$, close to the Alon-Boppana bound. [2]_ - In the case where $\epsilon = 0 $ it returns a Ramanujan graph. + In the case where $\epsilon = 0$ it returns a Ramanujan graph. A Ramanujan graph has spectral gap almost as large as possible, which makes them excellent expanders. 
[3]_ From 3394571321815993d52d6e8c1c0ef062c6dedf0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 11:16:49 -0800 Subject: [PATCH 026/129] Bump actions/setup-python from 4 to 5 (#7201) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/coverage.yml | 2 +- .github/workflows/deploy-docs.yml | 2 +- .github/workflows/lint.yml | 2 +- .github/workflows/mypy.yml | 2 +- .github/workflows/nightly-release-test.yml | 2 +- .github/workflows/nightly.yml | 2 +- .github/workflows/pytest-randomly.yml | 2 +- .github/workflows/release.yml | 2 +- .github/workflows/test.yml | 10 +++++----- 9 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 5b75cfbcfdc..a7ffdea017b 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index d26df0bed6e..1bc885907b3 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.11" diff --git a/.github/workflows/lint.yml 
b/.github/workflows/lint.yml index 7f43b045f92..2634e5c1258 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index a025509cb0a..156841681a5 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/nightly-release-test.yml b/.github/workflows/nightly-release-test.yml index f9f1a8b7f25..94d9c38e689 100644 --- a/.github/workflows/nightly-release-test.yml +++ b/.github/workflows/nightly-release-test.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 43084700c25..b274f5c7f21 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install Python requirements diff --git a/.github/workflows/pytest-randomly.yml b/.github/workflows/pytest-randomly.yml index 162ad4189a2..683e630ad88 100644 --- a/.github/workflows/pytest-randomly.yml +++ b/.github/workflows/pytest-randomly.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" diff 
--git a/.github/workflows/release.yml b/.github/workflows/release.yml index accb03ae86c..91724b5b8e4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 name: Install Python with: python-version: "3.11" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 80d23aad3be..5d6b0113aff 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -40,7 +40,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -66,7 +66,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -90,7 +90,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -150,7 +150,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} From 73c655270ee22ae06b60c5a6c4a03d1fdf202fc2 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Thu, 4 Jan 2024 09:02:56 -0800 Subject: [PATCH 027/129] Update test suite for Pytest v8 (#7203) * Rm deprecated_call filter from strongly connected raises test. * Replace deprecated pytest.warns(None) with catch_warnings. 
--- .../algorithms/components/tests/test_strongly_connected.py | 3 +-- networkx/drawing/tests/test_agraph.py | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/networkx/algorithms/components/tests/test_strongly_connected.py b/networkx/algorithms/components/tests/test_strongly_connected.py index 6d4ea855229..21d9e671898 100644 --- a/networkx/algorithms/components/tests/test_strongly_connected.py +++ b/networkx/algorithms/components/tests/test_strongly_connected.py @@ -183,8 +183,7 @@ def test_connected_raise(self): with pytest.raises(NetworkXNotImplemented): next(nx.kosaraju_strongly_connected_components(G)) with pytest.raises(NetworkXNotImplemented): - with pytest.deprecated_call(): - next(nx.strongly_connected_components_recursive(G)) + next(nx.strongly_connected_components_recursive(G)) pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G) pytest.raises(NetworkXNotImplemented, nx.condensation, G) diff --git a/networkx/drawing/tests/test_agraph.py b/networkx/drawing/tests/test_agraph.py index 1ade719bc2b..fd99daaf757 100644 --- a/networkx/drawing/tests/test_agraph.py +++ b/networkx/drawing/tests/test_agraph.py @@ -1,6 +1,7 @@ """Unit tests for PyGraphviz interface.""" import os import tempfile +import warnings import pytest @@ -249,6 +250,6 @@ def test_no_warnings_raised(self): G.add_node(0, pos=(0, 0)) G.add_node(1, pos=(1, 1)) A = nx.nx_agraph.to_agraph(G) - with pytest.warns(None) as record: + with warnings.catch_warnings(record=True) as record: A.layout() assert len(record) == 0 From 5d2f19b876f4d73bbbee371c5279f48a4c615d23 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Thu, 4 Jan 2024 10:57:16 -0800 Subject: [PATCH 028/129] Undeprecate ``nx_pydot`` now that pydot is actively maintained again (#7204) * Un-xfail pydot tests. * Pin pydot minver to 2.0. * Rm deprecation bullet from devdocs. * Back out deprecation warnings for nx_pydot. 
--------- Co-authored-by: Jarrod Millman --- doc/developer/deprecations.rst | 1 - networkx/conftest.py | 3 -- networkx/drawing/nx_pydot.py | 43 ---------------------------- networkx/drawing/tests/test_pydot.py | 1 - pyproject.toml | 2 +- requirements/extra.txt | 2 +- 6 files changed, 2 insertions(+), 50 deletions(-) diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst index 8dfbccdc7ca..fffe9ecef2f 100644 --- a/doc/developer/deprecations.rst +++ b/doc/developer/deprecations.rst @@ -43,7 +43,6 @@ Make sure to review ``networkx/conftest.py`` after removing deprecated code. Version 3.3 ~~~~~~~~~~~ -* Remove pydot functionality ``drawing/nx_pydot.py``, if pydot is still not being maintained. See #5723 * Remove the ``forest_str`` function from ``readwrite/text.py``. Replace existing usages with ``write_network_text``. diff --git a/networkx/conftest.py b/networkx/conftest.py index 9a25495d580..8b1eeb5f97b 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -89,9 +89,6 @@ def pytest_collection_modifyitems(config, items): # TODO: The warnings below need to be dealt with, but for now we silence them. @pytest.fixture(autouse=True) def set_warnings(): - warnings.filterwarnings( - "ignore", category=DeprecationWarning, message="nx.nx_pydot" - ) warnings.filterwarnings( "ignore", category=FutureWarning, diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py index 1cd17818373..4e6a4dccf52 100644 --- a/networkx/drawing/nx_pydot.py +++ b/networkx/drawing/nx_pydot.py @@ -19,7 +19,6 @@ - Graphviz: https://www.graphviz.org - DOT Language: http://www.graphviz.org/doc/info/lang.html """ -import warnings from locale import getpreferredencoding import networkx as nx @@ -41,13 +40,6 @@ def write_dot(G, path): Path can be a string or a file handle. """ - msg = ( - "nx.nx_pydot.write_dot depends on the pydot package, which has " - "known issues and is not actively maintained. 
Consider using " - "nx.nx_agraph.write_dot instead.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) P = to_pydot(G) path.write(P.to_string()) return @@ -79,14 +71,6 @@ def read_dot(path): """ import pydot - msg = ( - "nx.nx_pydot.read_dot depends on the pydot package, which has " - "known issues and is not actively maintained. Consider using " - "nx.nx_agraph.read_dot instead.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - data = path.read() # List of one or more "pydot.Dot" instances deserialized from this file. @@ -120,12 +104,6 @@ def from_pydot(P): >>> G = nx.Graph(nx.nx_pydot.from_pydot(A)) """ - msg = ( - "nx.nx_pydot.from_pydot depends on the pydot package, which has " - "known issues and is not actively maintained.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) if P.get_strict(None): # pydot bug: get_strict() shouldn't take argument multiedges = False @@ -220,13 +198,6 @@ def to_pydot(N): """ import pydot - msg = ( - "nx.nx_pydot.to_pydot depends on the pydot package, which has " - "known issues and is not actively maintained.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - # set Graphviz graph type if N.is_directed(): graph_type = "digraph" @@ -348,14 +319,6 @@ def graphviz_layout(G, prog="neato", root=None): ----- This is a wrapper for pydot_layout. """ - msg = ( - "nx.nx_pydot.graphviz_layout depends on the pydot package, which has " - "known issues and is not actively maintained. 
Consider using " - "nx.nx_agraph.graphviz_layout instead.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - return pydot_layout(G=G, prog=prog, root=root) @@ -399,12 +362,6 @@ def pydot_layout(G, prog="neato", root=None): """ import pydot - msg = ( - "nx.nx_pydot.pydot_layout depends on the pydot package, which has " - "known issues and is not actively maintained.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) P = to_pydot(G) if root is not None: P.set("root", str(root)) diff --git a/networkx/drawing/tests/test_pydot.py b/networkx/drawing/tests/test_pydot.py index a58aa1647c9..7085bbecea3 100644 --- a/networkx/drawing/tests/test_pydot.py +++ b/networkx/drawing/tests/test_pydot.py @@ -11,7 +11,6 @@ pydot = pytest.importorskip("pydot") -@pytest.mark.xfail class TestPydot: def pydot_checks(self, G, prog): """ diff --git a/pyproject.toml b/pyproject.toml index 02d9167204e..ee181b83e69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,7 @@ doc = [ extra = [ 'lxml>=4.6', 'pygraphviz>=1.11', - 'pydot>=1.4.2', + 'pydot>=2.0', 'sympy>=1.10', ] test = [ diff --git a/requirements/extra.txt b/requirements/extra.txt index b730004c452..07dc3499f76 100644 --- a/requirements/extra.txt +++ b/requirements/extra.txt @@ -2,5 +2,5 @@ # Do not edit this file; modify pyproject.toml instead. lxml>=4.6 pygraphviz>=1.11 -pydot>=1.4.2 +pydot>=2.0 sympy>=1.10 From f5730046cc1671c3003f7836dc4e24f175e53418 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sat, 6 Jan 2024 10:33:46 -0800 Subject: [PATCH 029/129] Future-proofing and improve tests (#7209) * Fix intermittent doctest failures for expanders with seeding. * Rm deprecated asftype. * Fix warnings filter for join. * Prepare for pandas copy-on-write. 
--- networkx/algorithms/centrality/group.py | 6 +++--- networkx/conftest.py | 4 +++- networkx/generators/expanders.py | 6 +++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py index 8207a71a5ae..ff17d8f32ed 100644 --- a/networkx/algorithms/centrality/group.py +++ b/networkx/algorithms/centrality/group.py @@ -498,15 +498,15 @@ def _heuristic(k, root, DF_tree, D, nodes, greedy): / root_node["sigma"][added_node][y] ) DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy) - DF_tree.nodes[node_p]["betweenness"][x][y] = ( + DF_tree.nodes[node_p]["betweenness"].loc[y, x] = ( root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy ) if y != added_node: - DF_tree.nodes[node_p]["betweenness"][x][y] -= ( + DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= ( root_node["betweenness"][x][added_node] * dxyv ) if x != added_node: - DF_tree.nodes[node_p]["betweenness"][x][y] -= ( + DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= ( root_node["betweenness"][added_node][y] * dvxy ) diff --git a/networkx/conftest.py b/networkx/conftest.py index 8b1eeb5f97b..86cea45f944 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -117,7 +117,9 @@ def set_warnings(): "ignore", category=DeprecationWarning, message="\n\nThe `normalized`" ) warnings.filterwarnings( - "ignore", category=DeprecationWarning, message="function `join` is deprecated" + "ignore", + category=DeprecationWarning, + message="The function `join` is deprecated", ) warnings.filterwarnings( "ignore", diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index 2385682be58..2cd7976025a 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -248,7 +248,7 @@ def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None) Examples -------- - >>> G = nx.maybe_regular_expander(n=200, d=6) + >>> G = 
nx.maybe_regular_expander(n=200, d=6, seed=8020) Returns ------- @@ -387,8 +387,8 @@ def is_regular_expander(G, *, epsilon=0): _, d = nx.utils.arbitrary_element(G.degree) - A = nx.adjacency_matrix(G) - lams = eigsh(A.asfptype(), which="LM", k=2, return_eigenvectors=False) + A = nx.adjacency_matrix(G, dtype=float) + lams = eigsh(A, which="LM", k=2, return_eigenvectors=False) # lambda2 is the second biggest eigenvalue lambda2 = min(lams) From f52696e58ad85161adda1c66870d3a7789f138de Mon Sep 17 00:00:00 2001 From: Jarrod Millman Date: Mon, 8 Jan 2024 21:37:38 -0800 Subject: [PATCH 030/129] Drop old dependencies per SPEC 0 (#7217) --- pyproject.toml | 4 ++-- requirements/default.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ee181b83e69..c25e6217d91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,9 +55,9 @@ nx-loopback = 'networkx.classes.tests.dispatch_interface:dispatcher' [project.optional-dependencies] default = [ - 'numpy>=1.22', + 'numpy>=1.23', 'scipy>=1.9,!=1.11.0,!=1.11.1', - 'matplotlib>=3.5', + 'matplotlib>=3.6', 'pandas>=1.4', ] developer = [ diff --git a/requirements/default.txt b/requirements/default.txt index 4a0afcd77da..97e1e259dd0 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,6 +1,6 @@ # Generated via tools/generate_requirements.py and pre-commit hook. # Do not edit this file; modify pyproject.toml instead. 
-numpy>=1.22 +numpy>=1.23 scipy>=1.9,!=1.11.0,!=1.11.1 -matplotlib>=3.5 +matplotlib>=3.6 pandas>=1.4 From 5c9e8c3585bea6b302aced95d71f614a34310b47 Mon Sep 17 00:00:00 2001 From: Jarrod Millman Date: Mon, 8 Jan 2024 21:37:49 -0800 Subject: [PATCH 031/129] Update pygraphviz (#7216) --- pyproject.toml | 2 +- requirements/extra.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c25e6217d91..078f4ae10bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ doc = [ ] extra = [ 'lxml>=4.6', - 'pygraphviz>=1.11', + 'pygraphviz>=1.12', 'pydot>=2.0', 'sympy>=1.10', ] diff --git a/requirements/extra.txt b/requirements/extra.txt index 07dc3499f76..ad08057f4a1 100644 --- a/requirements/extra.txt +++ b/requirements/extra.txt @@ -1,6 +1,6 @@ # Generated via tools/generate_requirements.py and pre-commit hook. # Do not edit this file; modify pyproject.toml instead. lxml>=4.6 -pygraphviz>=1.11 +pygraphviz>=1.12 pydot>=2.0 sympy>=1.10 From e36da1da83564c788670885baedc1cee99b574ed Mon Sep 17 00:00:00 2001 From: smokestacklightnin <125844868+smokestacklightnin@users.noreply.github.com> Date: Tue, 9 Jan 2024 14:44:24 -0300 Subject: [PATCH 032/129] ENH : Provide non-normalized and normalized directed laplacian matrix calculation (#7199) Adds functionality for directed laplacian matrices * Remove the `not_implemented_for("directed")` decorator on `laplacian_matrix` * Add tests for 'laplacian_matrix` as applied to directed graphs * Add pointers to `directed_laplacian_matrix` and `directed_combinatorial_laplacian_matrix` in docstring of `laplacian_matrix` * Allow DiGraphs in `normalized_laplacian_matrix` --- networkx/linalg/laplacianmatrix.py | 83 ++++++++++++++++++++-- networkx/linalg/tests/test_laplacian.py | 94 +++++++++++++++++++++++++ 2 files changed, 173 insertions(+), 4 deletions(-) diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index 17bfd6d362b..bd9e1d3c45f 100644 --- 
a/networkx/linalg/laplacianmatrix.py +++ b/networkx/linalg/laplacianmatrix.py @@ -1,4 +1,11 @@ """Laplacian matrix of graphs. + +All calculations here are done using the out-degree. For Laplacians +using in-degree, us `G.reverse(copy=False)` instead of `G`. + +The `laplacian_matrix` function provides an unnormalized matrix, +while `normalized_laplacian_matrix`, `directed_laplacian_matrix`, +and `directed_combinatorial_laplacian_matrix` are all normalized. """ import networkx as nx from networkx.utils import not_implemented_for @@ -12,7 +19,6 @@ ] -@not_implemented_for("directed") @nx._dispatch(edge_attrs="weight") def laplacian_matrix(G, nodelist=None, weight="weight"): """Returns the Laplacian matrix of G. @@ -42,10 +48,19 @@ def laplacian_matrix(G, nodelist=None, weight="weight"): ----- For MultiGraph, the edges weights are summed. + This returns an unnormalized matrix. For a normalized output, + use `normalized_laplacian_matrix`, `directed_laplacian_matrix`, + or `directed_combinatorial_laplacian_matrix`. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` instead. + See Also -------- :func:`~networkx.convert_matrix.to_numpy_array` normalized_laplacian_matrix + directed_laplacian_matrix + directed_combinatorial_laplacian_matrix :func:`~networkx.linalg.spectrum.laplacian_spectrum` Examples @@ -62,6 +77,25 @@ def laplacian_matrix(G, nodelist=None, weight="weight"): [ 0 0 0 1 -1] [ 0 0 0 -1 1]] + >>> edges = [(1, 2), (2, 1), (2, 4), (4, 3), (3, 4),] + >>> DiG = nx.DiGraph(edges) + >>> print(nx.laplacian_matrix(DiG).toarray()) + [[ 1 -1 0 0] + [-1 2 -1 0] + [ 0 0 1 -1] + [ 0 0 -1 1]] + >>> G = nx.Graph(DiG) + >>> print(nx.laplacian_matrix(G).toarray()) + [[ 1 -1 0 0] + [-1 2 -1 0] + [ 0 -1 2 -1] + [ 0 0 -1 1]] + + References + ---------- + .. [1] Langville, Amy N., and Carl D. Meyer. Google’s PageRank and Beyond: + The Science of Search Engine Rankings. 
Princeton University Press, 2006. + """ import scipy as sp @@ -74,7 +108,6 @@ def laplacian_matrix(G, nodelist=None, weight="weight"): return D - A -@not_implemented_for("directed") @nx._dispatch(edge_attrs="weight") def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): r"""Returns the normalized Laplacian matrix of G. @@ -114,10 +147,36 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): If the Graph contains selfloops, D is defined as ``diag(sum(A, 1))``, where A is the adjacency matrix [2]_. + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` instead. + + For an unnormalized output, use `laplacian_matrix`. + + Examples + -------- + + >>> import numpy as np + >>> np.set_printoptions(precision=4) # To print with lower precision + >>> edges = [(1, 2), (2, 1), (2, 4), (4, 3), (3, 4),] + >>> DiG = nx.DiGraph(edges) + >>> print(nx.normalized_laplacian_matrix(DiG).toarray()) + [[ 1. -0.7071 0. 0. ] + [-0.7071 1. -0.7071 0. ] + [ 0. 0. 1. -1. ] + [ 0. 0. -1. 1. ]] + >>> G = nx.Graph(DiG) + >>> print(nx.normalized_laplacian_matrix(G).toarray()) + [[ 1. -0.7071 0. 0. ] + [-0.7071 1. -0.5 0. ] + [ 0. -0.5 1. -0.7071] + [ 0. 0. -0.7071 1. ]] + See Also -------- laplacian_matrix normalized_laplacian_spectrum + directed_laplacian_matrix + directed_combinatorial_laplacian_matrix References ---------- @@ -126,6 +185,8 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): .. [2] Steve Butler, Interlacing For Weighted Graphs Using The Normalized Laplacian, Electronic Journal of Linear Algebra, Volume 16, pp. 90-98, March 2007. + .. [3] Langville, Amy N., and Carl D. Meyer. Google’s PageRank and Beyond: + The Science of Search Engine Rankings. Princeton University Press, 2006. """ import numpy as np import scipy as sp @@ -195,7 +256,7 @@ def directed_laplacian_matrix( .. 
math:: - L = I - (\Phi^{1/2} P \Phi^{-1/2} + \Phi^{-1/2} P^T \Phi^{1/2} ) / 2 + L = I - \frac{1}{2} \left (\Phi^{1/2} P \Phi^{-1/2} + \Phi^{-1/2} P^T \Phi^{1/2} \right ) where `I` is the identity matrix, `P` is the transition matrix of the graph, and `\Phi` a matrix with the Perron vector of `P` in the diagonal and @@ -237,9 +298,16 @@ def directed_laplacian_matrix( ----- Only implemented for DiGraphs + The result is always a symmetric matrix. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` instead. + See Also -------- laplacian_matrix + normalized_laplacian_matrix + directed_combinatorial_laplacian_matrix References ---------- @@ -287,7 +355,7 @@ def directed_combinatorial_laplacian_matrix( .. math:: - L = \Phi - (\Phi P + P^T \Phi) / 2 + L = \Phi - \frac{1}{2} \left (\Phi P + P^T \Phi \right) where `P` is the transition matrix of the graph and `\Phi` a matrix with the Perron vector of `P` in the diagonal and zeros elsewhere [1]_. @@ -328,9 +396,16 @@ def directed_combinatorial_laplacian_matrix( ----- Only implemented for DiGraphs + The result is always a symmetric matrix. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` instead. + See Also -------- laplacian_matrix + normalized_laplacian_matrix + directed_laplacian_matrix References ---------- diff --git a/networkx/linalg/tests/test_laplacian.py b/networkx/linalg/tests/test_laplacian.py index 78cddacd3bd..23f1b28e19f 100644 --- a/networkx/linalg/tests/test_laplacian.py +++ b/networkx/linalg/tests/test_laplacian.py @@ -24,6 +24,31 @@ def setup_class(cls): for node in cls.Gsl.nodes(): cls.Gsl.add_edge(node, node) + # Graph used as an example in Sec. 4.1 of Langville and Meyer, + # "Google's PageRank and Beyond". 
+ cls.DiG = nx.DiGraph() + cls.DiG.add_edges_from( + ( + (1, 2), + (1, 3), + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ) + ) + cls.DiMG = nx.MultiDiGraph(cls.DiG) + cls.DiWG = nx.DiGraph( + (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.DiG.edges() + ) + cls.DiGsl = cls.DiG.copy() + for node in cls.DiGsl.nodes(): + cls.DiGsl.add_edge(node, node) + def test_laplacian(self): "Graph Laplacian" # fmt: off @@ -35,6 +60,16 @@ def test_laplacian(self): # fmt: on WL = 0.5 * NL OL = 0.3 * NL + # fmt: off + DiNL = np.array([[ 2, -1, -1, 0, 0, 0], + [ 0, 0, 0, 0, 0, 0], + [-1, -1, 3, -1, 0, 0], + [ 0, 0, 0, 2, -1, -1], + [ 0, 0, 0, -1, 2, -1], + [ 0, 0, 0, 0, -1, 1]]) + # fmt: on + DiWL = 0.5 * DiNL + DiOL = 0.3 * DiNL np.testing.assert_equal(nx.laplacian_matrix(self.G).todense(), NL) np.testing.assert_equal(nx.laplacian_matrix(self.MG).todense(), NL) np.testing.assert_equal( @@ -47,6 +82,20 @@ def test_laplacian(self): nx.laplacian_matrix(self.WG, weight="other").todense(), OL ) + np.testing.assert_equal(nx.laplacian_matrix(self.DiG).todense(), DiNL) + np.testing.assert_equal(nx.laplacian_matrix(self.DiMG).todense(), DiNL) + np.testing.assert_equal( + nx.laplacian_matrix(self.DiG, nodelist=[1, 2]).todense(), + np.array([[1, -1], [0, 0]]), + ) + np.testing.assert_equal(nx.laplacian_matrix(self.DiWG).todense(), DiWL) + np.testing.assert_equal( + nx.laplacian_matrix(self.DiWG, weight=None).todense(), DiNL + ) + np.testing.assert_equal( + nx.laplacian_matrix(self.DiWG, weight="other").todense(), DiOL + ) + def test_normalized_laplacian(self): "Generalized Graph Laplacian" # fmt: off @@ -65,6 +114,25 @@ def test_normalized_laplacian(self): [-0.2887, -0.3333, 0.6667, 0. , 0. ], [-0.3536, 0. , 0. , 0.5 , 0. ], [ 0. , 0. , 0. , 0. , 0. ]]) + + DiG = np.array([[ 1. , 0. , -0.4082, 0. , 0. , 0. ], + [ 0. , 0. , 0. , 0. , 0. , 0. ], + [-0.4082, 0. , 1. , 0. , -0.4082, 0. ], + [ 0. , 0. , 0. , 1. , -0.5 , -0.7071], + [ 0. , 0. , 0. 
, -0.5 , 1. , -0.7071], + [ 0. , 0. , 0. , -0.7071, 0. , 1. ]]) + DiGL = np.array([[ 1. , 0. , -0.4082, 0. , 0. , 0. ], + [ 0. , 0. , 0. , 0. , 0. , 0. ], + [-0.4082, 0. , 1. , -0.4082, 0. , 0. ], + [ 0. , 0. , 0. , 1. , -0.5 , -0.7071], + [ 0. , 0. , 0. , -0.5 , 1. , -0.7071], + [ 0. , 0. , 0. , 0. , -0.7071, 1. ]]) + DiLsl = np.array([[ 0.6667, -0.5774, -0.2887, 0. , 0. , 0. ], + [ 0. , 0. , 0. , 0. , 0. , 0. ], + [-0.2887, -0.5 , 0.75 , -0.2887, 0. , 0. ], + [ 0. , 0. , 0. , 0.6667, -0.3333, -0.4082], + [ 0. , 0. , 0. , -0.3333, 0.6667, -0.4082], + [ 0. , 0. , 0. , 0. , -0.4082, 0.5 ]]) # fmt: on np.testing.assert_almost_equal( @@ -90,6 +158,32 @@ def test_normalized_laplacian(self): nx.normalized_laplacian_matrix(self.Gsl).todense(), Lsl, decimal=3 ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix( + self.DiG, + nodelist=range(1, 1 + 6), + ).todense(), + DiG, + decimal=3, + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiG).todense(), DiGL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiMG).todense(), DiGL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiWG).todense(), DiGL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiWG, weight="other").todense(), + DiGL, + decimal=3, + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiGsl).todense(), DiLsl, decimal=3 + ) + def test_directed_laplacian(): "Directed Laplacian" From 205cd05ab07293ebbab8b7329adb00beb862fef0 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Tue, 9 Jan 2024 19:57:56 -0800 Subject: [PATCH 033/129] Refactor geometric_soft_configuration_model tests for performance (#7210) * TST: Consolidate mean kappa/deg test and use smaller graph. * TST: Reduce size of test graphs. 
--- networkx/generators/tests/test_geometric.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/networkx/generators/tests/test_geometric.py b/networkx/generators/tests/test_geometric.py index 604a388f93f..f1c68bead51 100644 --- a/networkx/generators/tests/test_geometric.py +++ b/networkx/generators/tests/test_geometric.py @@ -387,19 +387,15 @@ def test_set_attributes_S1(): assert len(radii) == 100 -def test_mean_kappas_S1(): +def test_mean_kappas_mean_degree_S1(): G = nx.geometric_soft_configuration_graph( - beta=2.5, n=5000, gamma=2.7, mean_degree=10, seed=42 + beta=2.5, n=50, gamma=2.7, mean_degree=10, seed=8023 ) + kappas = nx.get_node_attributes(G, "kappa") mean_kappas = sum(kappas.values()) / len(kappas) assert math.fabs(mean_kappas - 10) < 0.5 - -def test_mean_degree_S1(): - G = nx.geometric_soft_configuration_graph( - beta=2.5, n=5000, gamma=2.7, mean_degree=10, seed=42 - ) degrees = dict(G.degree()) mean_degree = sum(degrees.values()) / len(degrees) assert math.fabs(mean_degree - 10) < 1 @@ -480,10 +476,10 @@ def test_mean_degree_influence_on_connectivity_S1(): def test_compare_mean_kappas_different_gammas_S1(): G1 = nx.geometric_soft_configuration_graph( - beta=1.5, n=2000, gamma=2.7, mean_degree=20, seed=42 + beta=1.5, n=20, gamma=2.7, mean_degree=5, seed=42 ) G2 = nx.geometric_soft_configuration_graph( - beta=1.5, n=2000, gamma=3.5, mean_degree=20, seed=42 + beta=1.5, n=20, gamma=3.5, mean_degree=5, seed=42 ) kappas1 = nx.get_node_attributes(G1, "kappa") mean_kappas1 = sum(kappas1.values()) / len(kappas1) From 0204a246a8bfba38153f03b3967a669c05a7181f Mon Sep 17 00:00:00 2001 From: Mauricio Souza de Alencar <856825+mdealencar@users.noreply.github.com> Date: Wed, 10 Jan 2024 18:30:19 +0100 Subject: [PATCH 034/129] Add `add_half_edge` method to PlanarEmbedding (#7202) Add a new method for adding edges to PlanarEmbedding as an alternative to the existing three methods add..._cw, add..._ccw, and add..._first_edge. 
This tightens the API and allows for dropping explicit tracking of the `first_nbr` internal state variable. * feat: drop the use of node attribute "first_nbr" in PlanarEmbedding (planarity.py) * feat: add method PlanarEmbedding.add_half_edge(), which superceeds the _cw and _ccw varieties, plus update the planar_drawing and tests accordingly (planar_drawing.py, planarity.py, test_planarity.py) --------- Co-authored-by: mdealencar Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- networkx/algorithms/planar_drawing.py | 8 +- networkx/algorithms/planarity.py | 206 ++++++++++++-------- networkx/algorithms/tests/test_planarity.py | 88 ++++++--- 3 files changed, 193 insertions(+), 109 deletions(-) diff --git a/networkx/algorithms/planar_drawing.py b/networkx/algorithms/planar_drawing.py index 5c091e26e4d..ea25809b6ae 100644 --- a/networkx/algorithms/planar_drawing.py +++ b/networkx/algorithms/planar_drawing.py @@ -326,8 +326,8 @@ def triangulate_face(embedding, v1, v2): v1, v2, v3 = v2, v3, v4 else: # Add edge for triangulation - embedding.add_half_edge_cw(v1, v3, v2) - embedding.add_half_edge_ccw(v3, v1, v2) + embedding.add_half_edge(v1, v3, ccw=v2) + embedding.add_half_edge(v3, v1, cw=v2) v1, v2, v3 = v1, v3, v4 # Get next node _, v4 = embedding.next_face_half_edge(v2, v3) @@ -445,8 +445,8 @@ def make_bi_connected(embedding, starting_node, outgoing_node, edges_counted): # cycle is not completed yet if v2 in face_set: # v2 encountered twice: Add edge to ensure 2-connectedness - embedding.add_half_edge_cw(v1, v3, v2) - embedding.add_half_edge_ccw(v3, v1, v2) + embedding.add_half_edge(v1, v3, ccw=v2) + embedding.add_half_edge(v3, v1, cw=v2) edges_counted.add((v2, v3)) edges_counted.add((v3, v1)) v2 = v1 diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index ad46f4739e5..c5611eca39f 100644 --- a/networkx/algorithms/planarity.py +++ b/networkx/algorithms/planarity.py @@ -376,7 +376,7 @@ def lr_planarity(self): # initialize the 
embedding previous_node = None for w in self.ordered_adjs[v]: - self.embedding.add_half_edge_cw(v, w, previous_node) + self.embedding.add_half_edge(v, w, ccw=previous_node) previous_node = w # Free no longer used variables @@ -436,7 +436,7 @@ def lr_planarity_recursive(self): # initialize the embedding previous_node = None for w in self.ordered_adjs[v]: - self.embedding.add_half_edge_cw(v, w, previous_node) + self.embedding.add_half_edge(v, w, ccw=previous_node) previous_node = w # compute the complete embedding @@ -714,9 +714,9 @@ def dfs_embedding(self, v): break # handle next node in dfs_stack (i.e. w) else: # back edge if self.side[ei] == 1: - self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) + self.embedding.add_half_edge(w, v, ccw=self.right_ref[w]) else: - self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) + self.embedding.add_half_edge(w, v, cw=self.left_ref[w]) self.left_ref[w] = v def dfs_embedding_recursive(self, v): @@ -731,10 +731,10 @@ def dfs_embedding_recursive(self, v): else: # back edge if self.side[ei] == 1: # place v directly after right_ref[w] in embed. list of w - self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) + self.embedding.add_half_edge(w, v, ccw=self.right_ref[w]) else: # place v directly before left_ref[w] in embed. list of w - self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) + self.embedding.add_half_edge(w, v, cw=self.left_ref[w]) self.left_ref[w] = v def sign(self, e): @@ -791,15 +791,12 @@ class PlanarEmbedding(nx.DiGraph): * Edges must go in both directions (because the edge attributes differ) * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a correct planar embedding. - * A node with non zero degree must have a node attribute 'first_nbr'. 
As long as a PlanarEmbedding is invalid only the following methods should be called: - * :meth:`add_half_edge_ccw` - * :meth:`add_half_edge_cw` + * :meth:`add_half_edge` * :meth:`connect_components` - * :meth:`add_half_edge_first` Even though the graph is a subclass of nx.DiGraph, it can still be used for algorithms that require undirected graphs, because the method @@ -808,14 +805,14 @@ class PlanarEmbedding(nx.DiGraph): **Half edges:** - In methods like `add_half_edge_ccw` the term "half-edge" is used, which is + In methods like `add_half_edge` the term "half-edge" is used, which is a term that is used in `doubly connected edge lists `_. It is used to emphasize that the edge is only in one direction and there exists another half-edge in the opposite direction. While conventional edges always have two faces (including outer face) next to them, it is possible to assign each half-edge *exactly one* face. - For a half-edge (u, v) that is orientated such that u is below v then the + For a half-edge (u, v) that is oriented such that u is below v then the face that belongs to (u, v) is to the right of this half-edge. See Also @@ -833,23 +830,23 @@ class PlanarEmbedding(nx.DiGraph): Create an embedding of a star graph (compare `nx.star_graph(3)`): >>> G = nx.PlanarEmbedding() - >>> G.add_half_edge_cw(0, 1, None) - >>> G.add_half_edge_cw(0, 2, 1) - >>> G.add_half_edge_cw(0, 3, 2) - >>> G.add_half_edge_cw(1, 0, None) - >>> G.add_half_edge_cw(2, 0, None) - >>> G.add_half_edge_cw(3, 0, None) + >>> G.add_half_edge(0, 1) + >>> G.add_half_edge(0, 2, ccw=1) + >>> G.add_half_edge(0, 3, ccw=2) + >>> G.add_half_edge(1, 0) + >>> G.add_half_edge(2, 0) + >>> G.add_half_edge(3, 0) Alternatively the same embedding can also be defined in counterclockwise orientation. 
The following results in exactly the same PlanarEmbedding: >>> G = nx.PlanarEmbedding() - >>> G.add_half_edge_ccw(0, 1, None) - >>> G.add_half_edge_ccw(0, 3, 1) - >>> G.add_half_edge_ccw(0, 2, 3) - >>> G.add_half_edge_ccw(1, 0, None) - >>> G.add_half_edge_ccw(2, 0, None) - >>> G.add_half_edge_ccw(3, 0, None) + >>> G.add_half_edge(0, 1) + >>> G.add_half_edge(0, 3, cw=1) + >>> G.add_half_edge(0, 2, cw=3) + >>> G.add_half_edge(1, 0) + >>> G.add_half_edge(2, 0) + >>> G.add_half_edge(3, 0) After creating a graph, it is possible to validate that the PlanarEmbedding object is correct: @@ -894,8 +891,10 @@ def set_data(self, data): """ for v in data: + ref = None for w in reversed(data[v]): - self.add_half_edge_first(v, w) + self.add_half_edge(v, w, cw=ref) + ref = w def neighbors_cw_order(self, v): """Generator for the neighbors of v in clockwise order. @@ -909,15 +908,89 @@ def neighbors_cw_order(self, v): node """ - if len(self[v]) == 0: + succs = self._succ[v] + if not succs: # v has no neighbors return - start_node = self.nodes[v]["first_nbr"] + start_node = next(reversed(succs)) yield start_node - current_node = self[v][start_node]["cw"] + current_node = succs[start_node]["cw"] while start_node != current_node: yield current_node - current_node = self[v][current_node]["cw"] + current_node = succs[current_node]["cw"] + + def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): + """Adds a half-edge from `start_node` to `end_node`. + + If the half-edge is not the first one out of `start_node`, a reference + node must be provided either in the clockwise (parameter `cw`) or in + the counterclockwise (parameter `ccw`) direction. Only one of `cw`/`ccw` + can be specified (or neither in the case of the first edge). + Note that specifying a reference in the clockwise (`cw`) direction means + inserting the new edge in the first counterclockwise position with + respect to the reference (and vice-versa). 
+ + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + cw, ccw: node + End node of reference edge. + Omit or pass `None` if adding the first out-half-edge of `start_node`. + + Raises + ------ + NetworkXException + If the `cw` or `ccw` node is not a successor of `start_node`. + If `start_node` has successors, but neither `cw` or `ccw` is provided. + If both `cw` and `ccw` are specified. + + See Also + -------- + connect_components + """ + + succs = self._succ.get(start_node) + if succs: + # there is already some edge out of start_node + leftmost_nbr = next(reversed(self._succ[start_node])) + if cw is not None: + if cw not in succs: + raise nx.NetworkXError("Invalid clockwise reference node.") + if ccw is not None: + raise nx.NetworkXError("Only one of cw/ccw can be specified.") + ref_ccw = succs[cw]["ccw"] + self.add_edge(start_node, end_node, cw=cw, ccw=ref_ccw) + succs[ref_ccw]["cw"] = end_node + succs[cw]["ccw"] = end_node + # when (cw == leftmost_nbr), the newly added neighbor is + # already at the end of dict self._succ[start_node] and + # takes the place of the former leftmost_nbr + move_leftmost_nbr_to_end = cw != leftmost_nbr + elif ccw is not None: + if ccw not in succs: + raise nx.NetworkXError("Invalid counterclockwise reference node.") + ref_cw = succs[ccw]["cw"] + self.add_edge(start_node, end_node, cw=ref_cw, ccw=ccw) + succs[ref_cw]["ccw"] = end_node + succs[ccw]["cw"] = end_node + move_leftmost_nbr_to_end = True + else: + raise nx.NetworkXError( + "Node already has out-half-edge(s), either cw or ccw reference node required." 
+ ) + if move_leftmost_nbr_to_end: + # LRPlanarity (via self.add_half_edge_first()) requires that + # we keep track of the leftmost neighbor, which we accomplish + # by keeping it as the last key in dict self._succ[start_node] + succs[leftmost_nbr] = succs.pop(leftmost_nbr) + else: + if cw is not None or ccw is not None: + raise nx.NetworkXError("Invalid reference node.") + # adding the first edge out of start_node + self.add_edge(start_node, end_node, ccw=end_node, cw=end_node) def check_structure(self): """Runs without exceptions if this object is valid. @@ -927,7 +1000,6 @@ def check_structure(self): * Edges go in both directions (because the edge attributes differ). * Every edge has a 'cw' and 'ccw' attribute which corresponds to a correct planar embedding. - * A node with a degree larger than 0 has a node attribute 'first_nbr'. Running this method verifies that the underlying Graph must be planar. @@ -1000,24 +1072,12 @@ def add_half_edge_ccw(self, start_node, end_node, reference_neighbor): See Also -------- + add_half_edge add_half_edge_cw connect_components - add_half_edge_first """ - if reference_neighbor is None: - # The start node has no neighbors - self.add_edge(start_node, end_node) # Add edge to graph - self[start_node][end_node]["cw"] = end_node - self[start_node][end_node]["ccw"] = end_node - self.nodes[start_node]["first_nbr"] = end_node - else: - ccw_reference = self[start_node][reference_neighbor]["ccw"] - self.add_half_edge_cw(start_node, end_node, ccw_reference) - - if reference_neighbor == self.nodes[start_node].get("first_nbr", None): - # Update first neighbor - self.nodes[start_node]["first_nbr"] = end_node + self.add_half_edge(start_node, end_node, cw=reference_neighbor) def add_half_edge_cw(self, start_node, end_node, reference_neighbor): """Adds a half-edge from start_node to end_node. 
@@ -1041,31 +1101,11 @@ def add_half_edge_cw(self, start_node, end_node, reference_neighbor): See Also -------- + add_half_edge add_half_edge_ccw connect_components - add_half_edge_first """ - self.add_edge(start_node, end_node) # Add edge to graph - - if reference_neighbor is None: - # The start node has no neighbors - self[start_node][end_node]["cw"] = end_node - self[start_node][end_node]["ccw"] = end_node - self.nodes[start_node]["first_nbr"] = end_node - return - - if reference_neighbor not in self[start_node]: - raise nx.NetworkXException( - "Cannot add edge. Reference neighbor does not exist" - ) - - # Get half-edge at the other side - cw_reference = self[start_node][reference_neighbor]["cw"] - # Alter half-edge data structures - self[start_node][reference_neighbor]["cw"] = end_node - self[start_node][end_node]["cw"] = cw_reference - self[start_node][cw_reference]["ccw"] = end_node - self[start_node][end_node]["ccw"] = reference_neighbor + self.add_half_edge(start_node, end_node, ccw=reference_neighbor) def connect_components(self, v, w): """Adds half-edges for (v, w) and (w, v) at some position. @@ -1084,15 +1124,24 @@ def connect_components(self, v, w): See Also -------- - add_half_edge_ccw - add_half_edge_cw - add_half_edge_first + add_half_edge """ - self.add_half_edge_first(v, w) - self.add_half_edge_first(w, v) + if v in self._succ and self._succ[v]: + ref = next(reversed(self._succ[v])) + else: + ref = None + self.add_half_edge(v, w, cw=ref) + if w in self._succ and self._succ[w]: + ref = next(reversed(self._succ[w])) + else: + ref = None + self.add_half_edge(w, v, cw=ref) def add_half_edge_first(self, start_node, end_node): - """The added half-edge is inserted at the first position in the order. + """Add a half-edge and set end_node as start_node's leftmost neighbor. + + The new edge is inserted counterclockwise with respect to the current + leftmost neighbor, if there is one. 
Parameters ---------- @@ -1101,15 +1150,14 @@ def add_half_edge_first(self, start_node, end_node): See Also -------- - add_half_edge_ccw - add_half_edge_cw + add_half_edge connect_components """ - if start_node in self and "first_nbr" in self.nodes[start_node]: - reference = self.nodes[start_node]["first_nbr"] - else: - reference = None - self.add_half_edge_ccw(start_node, end_node, reference) + succs = self._succ.get(start_node) + # the leftmost neighbor is the last entry in the + # self._succ[start_node] dict + leftmost_nbr = next(reversed(succs)) if succs else None + self.add_half_edge(start_node, end_node, cw=leftmost_nbr) def next_face_half_edge(self, v, w): """Returns the following half-edge left of a face. diff --git a/networkx/algorithms/tests/test_planarity.py b/networkx/algorithms/tests/test_planarity.py index 470b1d23bb8..02b1d32a922 100644 --- a/networkx/algorithms/tests/test_planarity.py +++ b/networkx/algorithms/tests/test_planarity.py @@ -372,48 +372,82 @@ def check_counterexample(G, sub_graph): class TestPlanarEmbeddingClass: + def test_add_half_edge(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(0, 1) + with pytest.raises( + nx.NetworkXException, match="Invalid clockwise reference node." + ): + embedding.add_half_edge(0, 2, cw=3) + with pytest.raises( + nx.NetworkXException, match="Invalid counterclockwise reference node." + ): + embedding.add_half_edge(0, 2, ccw=3) + with pytest.raises( + nx.NetworkXException, match="Only one of cw/ccw can be specified." + ): + embedding.add_half_edge(0, 2, cw=1, ccw=1) + with pytest.raises( + nx.NetworkXException, + match=( + r"Node already has out-half-edge\(s\), either" + " cw or ccw reference node required." 
+ ), + ): + embedding.add_half_edge(0, 2) + # these should work + embedding.add_half_edge(0, 2, cw=1) + embedding.add_half_edge(0, 3, ccw=1) + assert sorted(embedding.edges(data=True)) == [ + (0, 1, {"ccw": 2, "cw": 3}), + (0, 2, {"cw": 1, "ccw": 3}), + (0, 3, {"cw": 2, "ccw": 1}), + ] + def test_get_data(self): - embedding = self.get_star_embedding(3) + embedding = self.get_star_embedding(4) data = embedding.get_data() - data_cmp = {0: [2, 1], 1: [0], 2: [0]} + data_cmp = {0: [3, 2, 1], 1: [0], 2: [0], 3: [0]} assert data == data_cmp def test_missing_edge_orientation(self): + embedding = nx.PlanarEmbedding() + embedding.add_edge(1, 2) + embedding.add_edge(2, 1) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_edge(1, 2) - embedding.add_edge(2, 1) # Invalid structure because the orientation of the edge was not set embedding.check_structure() def test_invalid_edge_orientation(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(1, 2) + embedding.add_half_edge(2, 1) + embedding.add_edge(1, 3) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge_first(1, 2) - embedding.add_half_edge_first(2, 1) - embedding.add_edge(1, 3) embedding.check_structure() def test_missing_half_edge(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(1, 2) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge_first(1, 2) # Invalid structure because other half edge is missing embedding.check_structure() def test_not_fulfilling_euler_formula(self): + embedding = nx.PlanarEmbedding() + for i in range(5): + ref = None + for j in range(5): + if i != j: + embedding.add_half_edge(i, j, cw=ref) + ref = j with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - for i in range(5): - for j in range(5): - if i != j: - embedding.add_half_edge_first(i, j) embedding.check_structure() def 
test_missing_reference(self): - with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge_cw(1, 2, 3) + embedding = nx.PlanarEmbedding() + with pytest.raises(nx.NetworkXException, match="Invalid reference node."): + embedding.add_half_edge(1, 2, ccw=3) def test_connect_components(self): embedding = nx.PlanarEmbedding() @@ -421,22 +455,24 @@ def test_connect_components(self): def test_successful_face_traversal(self): embedding = nx.PlanarEmbedding() - embedding.add_half_edge_first(1, 2) - embedding.add_half_edge_first(2, 1) + embedding.add_half_edge(1, 2) + embedding.add_half_edge(2, 1) face = embedding.traverse_face(1, 2) assert face == [1, 2] def test_unsuccessful_face_traversal(self): + embedding = nx.PlanarEmbedding() + embedding.add_edge(1, 2, ccw=2, cw=3) + embedding.add_edge(2, 1, ccw=1, cw=3) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_edge(1, 2, ccw=2, cw=3) - embedding.add_edge(2, 1, ccw=1, cw=3) embedding.traverse_face(1, 2) @staticmethod def get_star_embedding(n): embedding = nx.PlanarEmbedding() + ref = None for i in range(1, n): - embedding.add_half_edge_first(0, i) - embedding.add_half_edge_first(i, 0) + embedding.add_half_edge(0, i, cw=ref) + ref = i + embedding.add_half_edge(i, 0) return embedding From 7e01590c9d493ea05a9395075e7c4ca5c3917248 Mon Sep 17 00:00:00 2001 From: AKSHAYA MADHURI <76612327+akshayamadhuri@users.noreply.github.com> Date: Thu, 11 Jan 2024 06:19:32 +0530 Subject: [PATCH 035/129] Improve docs for optimal_edit_paths (#7130) Provide further detail on how optimal edit paths can be used to transform G1 to be isomorphic with G2. 
Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- networkx/algorithms/similarity.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 3a1cfff8147..183b41f66b6 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -322,7 +322,8 @@ def optimal_edit_paths( edge_edit_path : list of tuples ((u1, v1), (u2, v2)) cost : numeric - Optimal edit path cost (graph edit distance). + Optimal edit path cost (graph edit distance). When the cost + is zero, it indicates that `G1` and `G2` are isomorphic. Examples -------- @@ -334,6 +335,14 @@ def optimal_edit_paths( >>> cost 5.0 + Notes + ----- + To transform `G1` into a graph isomorphic to `G2`, apply the node + and edge edits in the returned ``edit_paths``. + In the case of isomorphic graphs, the cost is zero, and the paths + represent different isomorphic mappings (isomorphisms). That is, the + edits involve renaming nodes and edges to match the structure of `G2`. + See Also -------- graph_edit_distance, optimize_edit_paths From 54eca158c1ec10ea996fb4744b030bd47a42ff73 Mon Sep 17 00:00:00 2001 From: Nihal John George <31016048+nihalgeorge01@users.noreply.github.com> Date: Thu, 11 Jan 2024 02:25:16 -0500 Subject: [PATCH 036/129] Fix random_spanning_tree() for single node and empty graphs (#7211) --- networkx/algorithms/tree/mst.py | 13 ++++++++---- networkx/algorithms/tree/tests/test_mst.py | 24 ++++++++++++++++++++++ 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py index f4cec03dc30..ee264d41ae5 100644 --- a/networkx/algorithms/tree/mst.py +++ b/networkx/algorithms/tree/mst.py @@ -897,14 +897,15 @@ def spanning_tree_total_weight(G, weight): # itself. if G.number_of_edges() == 1: return G.edges(data=weight).__iter__().__next__()[2] - # 2. There are more than two edges in the graph. 
Then, we can find the + # 2. There are no edges or two or more edges in the graph. Then, we find the # total weight of the spanning trees using the formula in the - # reference paper: take the weight of that edge and multiple it by - # the number of spanning trees which have to include that edge. This + # reference paper: take the weight of each edge and multiply it by + # the number of spanning trees which include that edge. This # can be accomplished by contracting the edge and finding the # multiplicative total spanning tree weight if the weight of each edge # is assumed to be 1, which is conveniently built into networkx already, - # by calling total_spanning_tree_weight with weight=None + # by calling total_spanning_tree_weight with weight=None. + # Note that with no edges the returned value is just zero. else: total = 0 for u, v, w in G.edges(data=weight): @@ -913,6 +914,10 @@ def spanning_tree_total_weight(G, weight): ) return total + if G.number_of_nodes() < 2: + # no edges in the spanning tree + return nx.empty_graph(G.nodes) + U = set() st_cached_value = 0 V = set(G.edges()) diff --git a/networkx/algorithms/tree/tests/test_mst.py b/networkx/algorithms/tree/tests/test_mst.py index 373f16cf7a0..bc55000a6e6 100644 --- a/networkx/algorithms/tree/tests/test_mst.py +++ b/networkx/algorithms/tree/tests/test_mst.py @@ -706,3 +706,27 @@ def test_random_spanning_tree_additive_large(): # Assert that p is greater than the significance level so that we do not # reject the null hypothesis assert not p < 0.05 + + +def test_random_spanning_tree_empty_graph(): + G = nx.Graph() + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 0 + assert len(rst.edges) == 0 + + +def test_random_spanning_tree_single_node_graph(): + G = nx.Graph() + G.add_node(0) + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 1 + assert len(rst.edges) == 0 + + +def test_random_spanning_tree_single_node_loop(): + G = nx.Graph() + G.add_node(0) + G.add_edge(0, 0) + rst = 
nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 1 + assert len(rst.edges) == 0 From 5a7800e7cccc1da85f67c35f3068370f3f588ea5 Mon Sep 17 00:00:00 2001 From: Mauricio Souza de Alencar <856825+mdealencar@users.noreply.github.com> Date: Thu, 11 Jan 2024 20:00:21 +0100 Subject: [PATCH 037/129] Update methods for modifying edges in PlanarEmbedding (#6798) Implements edge and node removal methods for the PlanarEmbedding class, with all of the additional checking required to ensure edge attributes are updated properly. Also explicitly disallows the methods for adding edges, which may invalidate the planarity Co-authored-by: mdealencar Co-authored-by: Dan Schult Co-authored-by: mdealencar --- networkx/algorithms/planarity.py | 166 +++++++++++++++++++- networkx/algorithms/tests/test_planarity.py | 77 +++++++-- 2 files changed, 230 insertions(+), 13 deletions(-) diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index c5611eca39f..59206b412e0 100644 --- a/networkx/algorithms/planarity.py +++ b/networkx/algorithms/planarity.py @@ -855,6 +855,22 @@ class PlanarEmbedding(nx.DiGraph): """ + def __init__(self, incoming_graph_data=None, **attr): + super().__init__(incoming_graph_data=incoming_graph_data, **attr) + self.add_edge = self.__forbidden + self.add_edges_from = self.__forbidden + self.add_weighted_edges_from = self.__forbidden + + def __forbidden(self, *args, **kwargs): + """Forbidden operation + + Any edge additions to a PlanarEmbedding should be done using + method `add_half_edge`. + """ + raise NotImplementedError( + "Use `add_half_edge` method to add edges to a PlanarEmbedding." + ) + def get_data(self): """Converts the adjacency structure into a better readable structure. @@ -896,6 +912,75 @@ def set_data(self, data): self.add_half_edge(v, w, cw=ref) ref = w + def remove_node(self, n): + """Remove node n. + + Removes the node n and all adjacent edges, updating the + PlanarEmbedding to account for any resulting edge removal. 
+ Attempting to remove a non-existent node will raise an exception. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. + + See Also + -------- + remove_nodes_from + + """ + try: + for u in self._pred[n]: + succs_u = self._succ[u] + un_cw = succs_u[n]["cw"] + un_ccw = succs_u[n]["ccw"] + del succs_u[n] + del self._pred[u][n] + if n != un_cw: + succs_u[un_cw]["ccw"] = un_ccw + succs_u[un_ccw]["cw"] = un_cw + del self._node[n] + del self._succ[n] + del self._pred[n] + except KeyError as err: # NetworkXError if n not in self + raise nx.NetworkXError( + f"The node {n} is not in the planar embedding." + ) from err + + def remove_nodes_from(self, nodes): + """Remove multiple nodes. + + Parameters + ---------- + nodes : iterable container + A container of nodes (list, dict, set, etc.). If a node + in the container is not in the graph it is silently ignored. + + See Also + -------- + remove_node + + Notes + ----- + When removing nodes from an iterator over the graph you are changing, + a `RuntimeError` will be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.remove_nodes_from`. + + """ + for n in nodes: + if n in self._node: + self.remove_node(n) + # silently skip non-existing nodes + def neighbors_cw_order(self, v): """Generator for the neighbors of v in clockwise order. @@ -940,6 +1025,7 @@ def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): End node of reference edge. Omit or pass `None` if adding the first out-half-edge of `start_node`. 
+ Raises ------ NetworkXException @@ -962,7 +1048,7 @@ def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): if ccw is not None: raise nx.NetworkXError("Only one of cw/ccw can be specified.") ref_ccw = succs[cw]["ccw"] - self.add_edge(start_node, end_node, cw=cw, ccw=ref_ccw) + super().add_edge(start_node, end_node, cw=cw, ccw=ref_ccw) succs[ref_ccw]["cw"] = end_node succs[cw]["ccw"] = end_node # when (cw == leftmost_nbr), the newly added neighbor is @@ -973,7 +1059,7 @@ def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): if ccw not in succs: raise nx.NetworkXError("Invalid counterclockwise reference node.") ref_cw = succs[ccw]["cw"] - self.add_edge(start_node, end_node, cw=ref_cw, ccw=ccw) + super().add_edge(start_node, end_node, cw=ref_cw, ccw=ccw) succs[ref_cw]["ccw"] = end_node succs[ccw]["cw"] = end_node move_leftmost_nbr_to_end = True @@ -986,11 +1072,12 @@ def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): # we keep track of the leftmost neighbor, which we accomplish # by keeping it as the last key in dict self._succ[start_node] succs[leftmost_nbr] = succs.pop(leftmost_nbr) + else: if cw is not None or ccw is not None: raise nx.NetworkXError("Invalid reference node.") # adding the first edge out of start_node - self.add_edge(start_node, end_node, ccw=end_node, cw=end_node) + super().add_edge(start_node, end_node, ccw=end_node, cw=end_node) def check_structure(self): """Runs without exceptions if this object is valid. @@ -1107,6 +1194,79 @@ def add_half_edge_cw(self, start_node, end_node, reference_neighbor): """ self.add_half_edge(start_node, end_node, ccw=reference_neighbor) + def remove_edge(self, u, v): + """Remove the edge between u and v. + + Parameters + ---------- + u, v : nodes + Remove the half-edges (u, v) and (v, u) and update the + edge ordering around the removed edge. + + Raises + ------ + NetworkXError + If there is not an edge between u and v. 
+ + See Also + -------- + remove_edges_from : remove a collection of edges + """ + try: + succs_u = self._succ[u] + succs_v = self._succ[v] + uv_cw = succs_u[v]["cw"] + uv_ccw = succs_u[v]["ccw"] + vu_cw = succs_v[u]["cw"] + vu_ccw = succs_v[u]["ccw"] + del succs_u[v] + del self._pred[v][u] + del succs_v[u] + del self._pred[u][v] + if v != uv_cw: + succs_u[uv_cw]["ccw"] = uv_ccw + succs_u[uv_ccw]["cw"] = uv_cw + if u != vu_cw: + succs_v[vu_cw]["ccw"] = vu_ccw + succs_v[vu_ccw]["cw"] = vu_cw + except KeyError as err: + raise nx.NetworkXError( + f"The edge {u}-{v} is not in the planar embedding." + ) from err + + def remove_edges_from(self, ebunch): + """Remove all edges specified in ebunch. + + Parameters + ---------- + ebunch: list or container of edge tuples + Each pair of half-edges between the nodes given in the tuples + will be removed from the graph. The nodes can be passed as: + + - 2-tuples (u, v) half-edges (u, v) and (v, u). + - 3-tuples (u, v, k) where k is ignored. + + See Also + -------- + remove_edge : remove a single edge + + Notes + ----- + Will fail silently if an edge in ebunch is not in the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> ebunch = [(1, 2), (2, 3)] + >>> G.remove_edges_from(ebunch) + """ + for e in ebunch: + u, v = e[:2] # ignore edge data + # assuming that the PlanarEmbedding is valid, if the half_edge + # (u, v) is in the graph, then so is half_edge (v, u) + if u in self._succ and v in self._succ[u]: + self.remove_edge(u, v) + def connect_components(self, v, w): """Adds half-edges for (v, w) and (w, v) at some position. 
diff --git a/networkx/algorithms/tests/test_planarity.py b/networkx/algorithms/tests/test_planarity.py index 02b1d32a922..99bcff4184a 100644 --- a/networkx/algorithms/tests/test_planarity.py +++ b/networkx/algorithms/tests/test_planarity.py @@ -277,6 +277,20 @@ def test_counterexample_planar_recursive(self): G.add_node(1) get_counterexample_recursive(G) + def test_edge_removal_from_planar_embedding(self): + # PlanarEmbedding.check_structure() must succeed after edge removal + edges = ((0, 1), (1, 2), (2, 3), (3, 4), (4, 0), (0, 2), (0, 3)) + G = nx.Graph(edges) + cert, P = nx.check_planarity(G) + assert cert is True + P.remove_edge(0, 2) + self.check_graph(P, is_planar=True) + P.add_half_edge_ccw(1, 3, 2) + P.add_half_edge_cw(3, 1, 2) + self.check_graph(P, is_planar=True) + P.remove_edges_from(((0, 3), (1, 3))) + self.check_graph(P, is_planar=True) + def check_embedding(G, embedding): """Raises an exception if the combinatorial embedding is not correct @@ -410,19 +424,51 @@ def test_get_data(self): data_cmp = {0: [3, 2, 1], 1: [0], 2: [0], 3: [0]} assert data == data_cmp - def test_missing_edge_orientation(self): + def test_edge_removal(self): embedding = nx.PlanarEmbedding() - embedding.add_edge(1, 2) - embedding.add_edge(2, 1) + embedding.set_data( + { + 1: [2, 5, 7], + 2: [1, 3, 4, 5], + 3: [2, 4], + 4: [3, 6, 5, 2], + 5: [7, 1, 2, 4], + 6: [4, 7], + 7: [6, 1, 5], + } + ) + # remove_edges_from() calls remove_edge(), so both are tested here + embedding.remove_edges_from(((5, 4), (1, 5))) + embedding.check_structure() + embedding_expected = nx.PlanarEmbedding() + embedding_expected.set_data( + { + 1: [2, 7], + 2: [1, 3, 4, 5], + 3: [2, 4], + 4: [3, 6, 2], + 5: [7, 2], + 6: [4, 7], + 7: [6, 1, 5], + } + ) + assert nx.utils.graphs_equal(embedding, embedding_expected) + + def test_missing_edge_orientation(self): + embedding = nx.PlanarEmbedding({1: {2: {}}, 2: {1: {}}}) with pytest.raises(nx.NetworkXException): # Invalid structure because the orientation of the edge 
was not set embedding.check_structure() def test_invalid_edge_orientation(self): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge(1, 2) - embedding.add_half_edge(2, 1) - embedding.add_edge(1, 3) + embedding = nx.PlanarEmbedding( + { + 1: {2: {"cw": 2, "ccw": 2}}, + 2: {1: {"cw": 1, "ccw": 1}}, + 1: {3: {}}, + 3: {1: {}}, + } + ) with pytest.raises(nx.NetworkXException): embedding.check_structure() @@ -461,12 +507,23 @@ def test_successful_face_traversal(self): assert face == [1, 2] def test_unsuccessful_face_traversal(self): - embedding = nx.PlanarEmbedding() - embedding.add_edge(1, 2, ccw=2, cw=3) - embedding.add_edge(2, 1, ccw=1, cw=3) + embedding = nx.PlanarEmbedding( + {1: {2: {"cw": 3, "ccw": 2}}, 2: {1: {"cw": 3, "ccw": 1}}} + ) with pytest.raises(nx.NetworkXException): embedding.traverse_face(1, 2) + def test_forbidden_methods(self): + embedding = nx.PlanarEmbedding() + embedding.add_node(42) # no exception + embedding.add_nodes_from([(23, 24)]) # no exception + with pytest.raises(NotImplementedError): + embedding.add_edge(1, 3) + with pytest.raises(NotImplementedError): + embedding.add_edges_from([(0, 2), (1, 4)]) + with pytest.raises(NotImplementedError): + embedding.add_weighted_edges_from([(0, 2, 350), (1, 4, 125)]) + @staticmethod def get_star_embedding(n): embedding = nx.PlanarEmbedding() From 83030a626d50f507e27a88f40b4c6c75b85cb3ae Mon Sep 17 00:00:00 2001 From: Erik Welch Date: Fri, 12 Jan 2024 08:43:49 -0600 Subject: [PATCH 038/129] Rename `_dispatch` to `_dispatchable` (#7193) Co-authored-by: Dan Schult --- doc/developer/nxeps/nxep-0003.rst | 2 +- doc/reference/utils.rst | 2 +- networkx/__init__.py | 2 +- networkx/algorithms/approximation/clique.py | 8 +-- .../approximation/clustering_coefficient.py | 2 +- .../algorithms/approximation/connectivity.py | 6 +-- .../approximation/distance_measures.py | 2 +- .../approximation/dominating_set.py | 4 +- .../algorithms/approximation/kcomponents.py | 2 +- 
networkx/algorithms/approximation/matching.py | 2 +- networkx/algorithms/approximation/maxcut.py | 4 +- networkx/algorithms/approximation/ramsey.py | 2 +- .../algorithms/approximation/steinertree.py | 4 +- .../approximation/traveling_salesman.py | 16 +++--- .../algorithms/approximation/treewidth.py | 6 +-- .../algorithms/approximation/vertex_cover.py | 2 +- .../algorithms/assortativity/connectivity.py | 2 +- .../algorithms/assortativity/correlation.py | 8 +-- networkx/algorithms/assortativity/mixing.py | 8 +-- .../assortativity/neighbor_degree.py | 2 +- networkx/algorithms/assortativity/pairs.py | 4 +- networkx/algorithms/asteroidal.py | 6 +-- networkx/algorithms/bipartite/basic.py | 12 ++--- networkx/algorithms/bipartite/centrality.py | 6 +-- networkx/algorithms/bipartite/cluster.py | 6 +-- networkx/algorithms/bipartite/covering.py | 2 +- networkx/algorithms/bipartite/edgelist.py | 4 +- networkx/algorithms/bipartite/generators.py | 16 +++--- networkx/algorithms/bipartite/matching.py | 8 +-- networkx/algorithms/bipartite/matrix.py | 4 +- networkx/algorithms/bipartite/projection.py | 10 ++-- networkx/algorithms/bipartite/redundancy.py | 2 +- networkx/algorithms/bipartite/spectral.py | 2 +- networkx/algorithms/boundary.py | 4 +- networkx/algorithms/bridges.py | 6 +-- networkx/algorithms/centrality/betweenness.py | 4 +- .../centrality/betweenness_subset.py | 4 +- networkx/algorithms/centrality/closeness.py | 4 +- .../centrality/current_flow_betweenness.py | 6 +-- .../current_flow_betweenness_subset.py | 4 +- .../centrality/current_flow_closeness.py | 2 +- networkx/algorithms/centrality/degree_alg.py | 6 +-- networkx/algorithms/centrality/dispersion.py | 2 +- networkx/algorithms/centrality/eigenvector.py | 4 +- networkx/algorithms/centrality/flow_matrix.py | 2 +- networkx/algorithms/centrality/group.py | 12 ++--- networkx/algorithms/centrality/harmonic.py | 2 +- networkx/algorithms/centrality/katz.py | 4 +- networkx/algorithms/centrality/laplacian.py | 2 +- 
networkx/algorithms/centrality/load.py | 4 +- networkx/algorithms/centrality/percolation.py | 2 +- networkx/algorithms/centrality/reaching.py | 4 +- .../algorithms/centrality/second_order.py | 2 +- .../algorithms/centrality/subgraph_alg.py | 8 +-- networkx/algorithms/centrality/trophic.py | 6 +-- .../algorithms/centrality/voterank_alg.py | 2 +- networkx/algorithms/chains.py | 2 +- networkx/algorithms/chordal.py | 10 ++-- networkx/algorithms/clique.py | 14 +++--- networkx/algorithms/cluster.py | 12 ++--- .../algorithms/coloring/equitable_coloring.py | 8 +-- .../algorithms/coloring/greedy_coloring.py | 18 +++---- networkx/algorithms/communicability_alg.py | 4 +- networkx/algorithms/community/asyn_fluid.py | 2 +- networkx/algorithms/community/centrality.py | 2 +- .../algorithms/community/community_utils.py | 2 +- networkx/algorithms/community/kclique.py | 2 +- .../algorithms/community/kernighan_lin.py | 2 +- .../algorithms/community/label_propagation.py | 6 +-- networkx/algorithms/community/louvain.py | 4 +- networkx/algorithms/community/lukes.py | 2 +- .../algorithms/community/modularity_max.py | 4 +- networkx/algorithms/community/quality.py | 10 ++-- networkx/algorithms/components/attracting.py | 6 +-- networkx/algorithms/components/biconnected.py | 8 +-- networkx/algorithms/components/connected.py | 8 +-- .../algorithms/components/semiconnected.py | 2 +- .../components/strongly_connected.py | 12 ++--- .../components/tests/test_connected.py | 2 +- .../algorithms/components/weakly_connected.py | 6 +-- .../algorithms/connectivity/connectivity.py | 12 ++--- networkx/algorithms/connectivity/cuts.py | 8 +-- .../algorithms/connectivity/disjoint_paths.py | 4 +- .../connectivity/edge_augmentation.py | 26 +++++----- .../connectivity/edge_kcomponents.py | 8 +-- .../algorithms/connectivity/kcomponents.py | 2 +- networkx/algorithms/connectivity/kcutsets.py | 2 +- .../algorithms/connectivity/stoerwagner.py | 2 +- networkx/algorithms/connectivity/utils.py | 4 +- 
networkx/algorithms/core.py | 14 +++--- networkx/algorithms/covering.py | 4 +- networkx/algorithms/cuts.py | 16 +++--- networkx/algorithms/cycles.py | 14 +++--- networkx/algorithms/d_separation.py | 6 +-- networkx/algorithms/dag.py | 36 ++++++------- networkx/algorithms/distance_measures.py | 18 +++---- networkx/algorithms/distance_regular.py | 6 +-- networkx/algorithms/dominance.py | 4 +- networkx/algorithms/dominating.py | 4 +- networkx/algorithms/efficiency_measures.py | 6 +-- networkx/algorithms/euler.py | 12 ++--- networkx/algorithms/flow/boykovkolmogorov.py | 2 +- networkx/algorithms/flow/capacityscaling.py | 4 +- networkx/algorithms/flow/dinitz_alg.py | 2 +- networkx/algorithms/flow/edmondskarp.py | 4 +- networkx/algorithms/flow/gomory_hu.py | 2 +- networkx/algorithms/flow/maxflow.py | 8 +-- networkx/algorithms/flow/mincost.py | 12 +++-- networkx/algorithms/flow/networksimplex.py | 4 +- networkx/algorithms/flow/preflowpush.py | 2 +- .../algorithms/flow/shortestaugmentingpath.py | 2 +- networkx/algorithms/flow/utils.py | 6 +-- networkx/algorithms/graph_hashing.py | 4 +- networkx/algorithms/graphical.py | 12 ++--- networkx/algorithms/hierarchy.py | 2 +- networkx/algorithms/hybrid.py | 4 +- networkx/algorithms/isolate.py | 6 +-- networkx/algorithms/isomorphism/isomorph.py | 8 +-- .../isomorphism/tree_isomorphism.py | 8 +-- networkx/algorithms/isomorphism/vf2pp.py | 6 +-- networkx/algorithms/link_analysis/hits_alg.py | 2 +- .../algorithms/link_analysis/pagerank_alg.py | 4 +- .../link_analysis/tests/test_pagerank.py | 2 +- networkx/algorithms/link_prediction.py | 16 +++--- .../algorithms/lowest_common_ancestors.py | 6 +-- networkx/algorithms/matching.py | 12 ++--- networkx/algorithms/minors/contraction.py | 6 +-- networkx/algorithms/mis.py | 2 +- networkx/algorithms/moral.py | 2 +- networkx/algorithms/node_classification.py | 4 +- networkx/algorithms/non_randomness.py | 2 +- networkx/algorithms/operators/all.py | 8 +-- networkx/algorithms/operators/binary.py | 14 
+++--- networkx/algorithms/operators/product.py | 14 +++--- .../algorithms/operators/tests/test_binary.py | 4 +- networkx/algorithms/operators/unary.py | 4 +- networkx/algorithms/planarity.py | 10 ++-- networkx/algorithms/polynomials.py | 4 +- networkx/algorithms/reciprocity.py | 4 +- networkx/algorithms/regular.py | 6 +-- networkx/algorithms/richclub.py | 2 +- networkx/algorithms/shortest_paths/astar.py | 4 +- networkx/algorithms/shortest_paths/dense.py | 8 +-- networkx/algorithms/shortest_paths/generic.py | 14 +++--- .../algorithms/shortest_paths/unweighted.py | 16 +++--- .../algorithms/shortest_paths/weighted.py | 50 +++++++++---------- networkx/algorithms/similarity.py | 14 +++--- networkx/algorithms/simple_paths.py | 8 +-- networkx/algorithms/smallworld.py | 8 +-- networkx/algorithms/smetric.py | 2 +- networkx/algorithms/sparsifiers.py | 2 +- networkx/algorithms/structuralholes.py | 10 ++-- networkx/algorithms/summarization.py | 4 +- networkx/algorithms/swap.py | 6 +-- .../algorithms/tests/test_structuralholes.py | 2 +- networkx/algorithms/threshold.py | 10 ++-- networkx/algorithms/time_dependent.py | 2 +- networkx/algorithms/tournament.py | 14 +++--- networkx/algorithms/traversal/beamsearch.py | 2 +- .../traversal/breadth_first_search.py | 16 +++--- .../traversal/depth_first_search.py | 14 +++--- networkx/algorithms/traversal/edgebfs.py | 2 +- networkx/algorithms/traversal/edgedfs.py | 2 +- networkx/algorithms/tree/branchings.py | 14 +++--- networkx/algorithms/tree/coding.py | 8 +-- networkx/algorithms/tree/decomposition.py | 2 +- networkx/algorithms/tree/mst.py | 18 +++---- networkx/algorithms/tree/operations.py | 2 +- networkx/algorithms/tree/recognition.py | 8 +-- networkx/algorithms/triads.py | 14 +++--- networkx/algorithms/vitality.py | 2 +- networkx/algorithms/voronoi.py | 2 +- networkx/algorithms/walks.py | 2 +- networkx/algorithms/wiener.py | 2 +- networkx/classes/tests/dispatch_interface.py | 2 +- networkx/classes/tests/test_backends.py | 4 +- 
networkx/conftest.py | 8 +-- networkx/convert.py | 10 ++-- networkx/convert_matrix.py | 16 +++--- networkx/drawing/nx_agraph.py | 4 +- networkx/drawing/nx_pydot.py | 4 +- networkx/generators/atlas.py | 4 +- networkx/generators/classic.py | 40 +++++++-------- networkx/generators/cographs.py | 2 +- networkx/generators/community.py | 20 ++++---- networkx/generators/degree_seq.py | 14 +++--- networkx/generators/directed.py | 12 ++--- networkx/generators/duplication.py | 4 +- networkx/generators/ego.py | 2 +- networkx/generators/expanders.py | 6 +-- networkx/generators/geometric.py | 16 +++--- networkx/generators/harary_graph.py | 4 +- networkx/generators/internet_as_graphs.py | 2 +- networkx/generators/intersection.py | 6 +-- networkx/generators/interval_graph.py | 2 +- networkx/generators/joint_degree_seq.py | 8 +-- networkx/generators/lattice.py | 10 ++-- networkx/generators/line.py | 4 +- networkx/generators/mycielski.py | 4 +- networkx/generators/nonisomorphic_trees.py | 4 +- networkx/generators/random_clustered.py | 2 +- networkx/generators/random_graphs.py | 34 ++++++------- networkx/generators/small.py | 46 ++++++++--------- networkx/generators/social.py | 8 +-- networkx/generators/spectral_graph_forge.py | 2 +- networkx/generators/stochastic.py | 2 +- networkx/generators/sudoku.py | 2 +- networkx/generators/time_series.py | 2 +- networkx/generators/trees.py | 18 +++---- networkx/generators/triads.py | 2 +- networkx/linalg/algebraicconnectivity.py | 8 +-- networkx/linalg/attrmatrix.py | 4 +- networkx/linalg/bethehessianmatrix.py | 2 +- networkx/linalg/graphmatrix.py | 4 +- networkx/linalg/laplacianmatrix.py | 10 ++-- networkx/linalg/modularitymatrix.py | 4 +- networkx/linalg/spectrum.py | 10 ++-- networkx/readwrite/adjlist.py | 4 +- networkx/readwrite/edgelist.py | 6 +-- networkx/readwrite/gexf.py | 2 +- networkx/readwrite/gml.py | 4 +- networkx/readwrite/graph6.py | 4 +- networkx/readwrite/graphml.py | 4 +- networkx/readwrite/json_graph/adjacency.py | 2 +- 
networkx/readwrite/json_graph/cytoscape.py | 2 +- networkx/readwrite/json_graph/node_link.py | 2 +- networkx/readwrite/json_graph/tree.py | 2 +- networkx/readwrite/leda.py | 4 +- networkx/readwrite/multiline_adjlist.py | 4 +- networkx/readwrite/p2g.py | 4 +- networkx/readwrite/pajek.py | 4 +- networkx/readwrite/sparse6.py | 4 +- networkx/relabel.py | 4 +- networkx/utils/backends.py | 38 +++++++------- 234 files changed, 802 insertions(+), 794 deletions(-) diff --git a/doc/developer/nxeps/nxep-0003.rst b/doc/developer/nxeps/nxep-0003.rst index f3971ef3bbf..94916e117f5 100644 --- a/doc/developer/nxeps/nxep-0003.rst +++ b/doc/developer/nxeps/nxep-0003.rst @@ -290,7 +290,7 @@ Related Work This proposal is based on ideas and discussions from #3036 and #1393. -This proposal does not delve into backends using the `_dispatch` functionality +This proposal does not delve into backends using the `_dispatchable` functionality and whether we should be providing or allowing control over the builder functions for backend libraries. This is a potentially helpful discussion but is beyond the scope of this NXEP. diff --git a/doc/reference/utils.rst b/doc/reference/utils.rst index 21ae064076e..acd9c92584e 100644 --- a/doc/reference/utils.rst +++ b/doc/reference/utils.rst @@ -85,4 +85,4 @@ Backends .. 
autosummary:: :toctree: generated/ - _dispatch + _dispatchable diff --git a/networkx/__init__.py b/networkx/__init__.py index 34bb47f8cde..72d2ef6562f 100644 --- a/networkx/__init__.py +++ b/networkx/__init__.py @@ -17,7 +17,7 @@ from networkx.exception import * from networkx import utils -from networkx.utils.backends import _dispatch +from networkx.utils.backends import _dispatchable from networkx import classes from networkx.classes import filters diff --git a/networkx/algorithms/approximation/clique.py b/networkx/algorithms/approximation/clique.py index 4a3d8beba61..56443068633 100644 --- a/networkx/algorithms/approximation/clique.py +++ b/networkx/algorithms/approximation/clique.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def maximum_independent_set(G): """Returns an approximate maximum independent set. @@ -70,7 +70,7 @@ def maximum_independent_set(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def max_clique(G): r"""Find the Maximum Clique @@ -129,7 +129,7 @@ def max_clique(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def clique_removal(G): r"""Repeatedly remove cliques from the graph. @@ -182,7 +182,7 @@ def clique_removal(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def large_clique_size(G): """Find the size of a large clique in a graph. 
diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py index e15ac68460b..f95c86d2a91 100644 --- a/networkx/algorithms/approximation/clustering_coefficient.py +++ b/networkx/algorithms/approximation/clustering_coefficient.py @@ -6,7 +6,7 @@ @not_implemented_for("directed") @py_random_state(2) -@nx._dispatch(name="approximate_average_clustering") +@nx._dispatchable(name="approximate_average_clustering") def average_clustering(G, trials=1000, seed=None): r"""Estimates the average clustering coefficient of G. diff --git a/networkx/algorithms/approximation/connectivity.py b/networkx/algorithms/approximation/connectivity.py index bc5e7125937..a2214ed128b 100644 --- a/networkx/algorithms/approximation/connectivity.py +++ b/networkx/algorithms/approximation/connectivity.py @@ -12,7 +12,7 @@ ] -@nx._dispatch(name="approximate_local_node_connectivity") +@nx._dispatchable(name="approximate_local_node_connectivity") def local_node_connectivity(G, source, target, cutoff=None): """Compute node connectivity between source and target. @@ -108,7 +108,7 @@ def local_node_connectivity(G, source, target, cutoff=None): return K -@nx._dispatch(name="approximate_node_connectivity") +@nx._dispatchable(name="approximate_node_connectivity") def node_connectivity(G, s=None, t=None): r"""Returns an approximation for node connectivity for a graph or digraph G. @@ -214,7 +214,7 @@ def neighbors(v): return K -@nx._dispatch(name="approximate_all_pairs_node_connectivity") +@nx._dispatchable(name="approximate_all_pairs_node_connectivity") def all_pairs_node_connectivity(G, nbunch=None, cutoff=None): """Compute node connectivity between all pairs of nodes. 
diff --git a/networkx/algorithms/approximation/distance_measures.py b/networkx/algorithms/approximation/distance_measures.py index 9b817b3317c..a6fece661b1 100644 --- a/networkx/algorithms/approximation/distance_measures.py +++ b/networkx/algorithms/approximation/distance_measures.py @@ -7,7 +7,7 @@ @py_random_state(1) -@nx._dispatch(name="approximate_diameter") +@nx._dispatchable(name="approximate_diameter") def diameter(G, seed=None): """Returns a lower bound on the diameter of the graph G. diff --git a/networkx/algorithms/approximation/dominating_set.py b/networkx/algorithms/approximation/dominating_set.py index 97edb172f94..691564cf9dc 100644 --- a/networkx/algorithms/approximation/dominating_set.py +++ b/networkx/algorithms/approximation/dominating_set.py @@ -20,7 +20,7 @@ # TODO Why doesn't this algorithm work for directed graphs? @not_implemented_for("directed") -@nx._dispatch(node_attrs="weight") +@nx._dispatchable(node_attrs="weight") def min_weighted_dominating_set(G, weight=None): r"""Returns a dominating set that approximates the minimum weight node dominating set. @@ -101,7 +101,7 @@ def _cost(node_and_neighborhood): return dom_set -@nx._dispatch +@nx._dispatchable def min_edge_dominating_set(G): r"""Returns minimum cardinality edge dominating set. diff --git a/networkx/algorithms/approximation/kcomponents.py b/networkx/algorithms/approximation/kcomponents.py index a5df6cc686c..b540bd5f4a6 100644 --- a/networkx/algorithms/approximation/kcomponents.py +++ b/networkx/algorithms/approximation/kcomponents.py @@ -14,7 +14,7 @@ @not_implemented_for("directed") -@nx._dispatch(name="approximate_k_components") +@nx._dispatchable(name="approximate_k_components") def k_components(G, min_density=0.95): r"""Returns the approximate k-component structure of a graph G. 
diff --git a/networkx/algorithms/approximation/matching.py b/networkx/algorithms/approximation/matching.py index 8f1c3501666..3a7c8a39b2e 100644 --- a/networkx/algorithms/approximation/matching.py +++ b/networkx/algorithms/approximation/matching.py @@ -13,7 +13,7 @@ __all__ = ["min_maximal_matching"] -@nx._dispatch +@nx._dispatchable def min_maximal_matching(G): r"""Returns the minimum maximal matching of G. That is, out of all maximal matchings of the graph G, the smallest is returned. diff --git a/networkx/algorithms/approximation/maxcut.py b/networkx/algorithms/approximation/maxcut.py index 31ccdc9fde3..0c30d224d2f 100644 --- a/networkx/algorithms/approximation/maxcut.py +++ b/networkx/algorithms/approximation/maxcut.py @@ -7,7 +7,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(1) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def randomized_partitioning(G, seed=None, p=0.5, weight=None): """Compute a random partitioning of the graph nodes and its cut value. @@ -53,7 +53,7 @@ def _swap_node_partition(cut, node): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(2) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def one_exchange(G, initial_cut=None, seed=None, weight=None): """Compute a partitioning of the graphs nodes and the corresponding cut value. diff --git a/networkx/algorithms/approximation/ramsey.py b/networkx/algorithms/approximation/ramsey.py index 6f45c4f4971..5cb9fda0449 100644 --- a/networkx/algorithms/approximation/ramsey.py +++ b/networkx/algorithms/approximation/ramsey.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def ramsey_R2(G): r"""Compute the largest clique and largest independent set in `G`. 
diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index 50aea045fea..7751251ddf8 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -7,7 +7,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def metric_closure(G, weight="weight"): """Return the metric closure of a graph. @@ -126,7 +126,7 @@ def _remove_nonterminal_leaves(G, terminals): @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def steiner_tree(G, terminal_nodes, weight="weight", method=None): r"""Return an approximation to the minimum Steiner tree of a graph. diff --git a/networkx/algorithms/approximation/traveling_salesman.py b/networkx/algorithms/approximation/traveling_salesman.py index 2164ea25db6..111ed7d60c7 100644 --- a/networkx/algorithms/approximation/traveling_salesman.py +++ b/networkx/algorithms/approximation/traveling_salesman.py @@ -124,7 +124,7 @@ def move_one_node(soln, seed): @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def christofides(G, weight="weight", tree=None): """Approximate a solution of the traveling salesman problem @@ -197,7 +197,7 @@ def _shortcutting(circuit): return nodes -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def traveling_salesman_problem(G, weight="weight", nodes=None, cycle=True, method=None): """Find the shortest path in `G` connecting specified nodes @@ -340,7 +340,7 @@ def traveling_salesman_problem(G, weight="weight", nodes=None, cycle=True, metho @not_implemented_for("undirected") @py_random_state(2) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def asadpour_atsp(G, weight="weight", seed=None, source=None): """ Returns an approximate solution to the traveling salesman problem. 
@@ -490,7 +490,7 @@ def asadpour_atsp(G, weight="weight", seed=None, source=None): return _shortcutting(circuit) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def held_karp_ascent(G, weight="weight"): """ Minimizes the Held-Karp relaxation of the TSP for `G` @@ -801,7 +801,7 @@ def find_epsilon(k, d): return next(k_max.__iter__()).size(weight), z_star -@nx._dispatch +@nx._dispatchable def spanning_tree_distribution(G, z): """ Find the asadpour exponential distribution of spanning trees. @@ -912,7 +912,7 @@ def q(e): return gamma -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def greedy_tsp(G, weight="weight", source=None): """Return a low cost cycle starting at `source` and its cost. @@ -1005,7 +1005,7 @@ def greedy_tsp(G, weight="weight", source=None): @py_random_state(9) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def simulated_annealing_tsp( G, init_cycle, @@ -1224,7 +1224,7 @@ def simulated_annealing_tsp( @py_random_state(9) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def threshold_accepting_tsp( G, init_cycle, diff --git a/networkx/algorithms/approximation/treewidth.py b/networkx/algorithms/approximation/treewidth.py index 89ac39ce67d..c7c01a1cd4d 100644 --- a/networkx/algorithms/approximation/treewidth.py +++ b/networkx/algorithms/approximation/treewidth.py @@ -41,7 +41,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def treewidth_min_degree(G): """Returns a treewidth decomposition using the Minimum Degree heuristic. @@ -65,7 +65,7 @@ def treewidth_min_degree(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def treewidth_min_fill_in(G): """Returns a treewidth decomposition using the Minimum Fill-in heuristic. 
@@ -177,7 +177,7 @@ def min_fill_in_heuristic(graph): return min_fill_in_node -@nx._dispatch +@nx._dispatchable def treewidth_decomp(G, heuristic=min_fill_in_heuristic): """Returns a treewidth decomposition using the passed heuristic. diff --git a/networkx/algorithms/approximation/vertex_cover.py b/networkx/algorithms/approximation/vertex_cover.py index dbd7a123d02..c71399ebcc9 100644 --- a/networkx/algorithms/approximation/vertex_cover.py +++ b/networkx/algorithms/approximation/vertex_cover.py @@ -12,7 +12,7 @@ __all__ = ["min_weighted_vertex_cover"] -@nx._dispatch(node_attrs="weight") +@nx._dispatchable(node_attrs="weight") def min_weighted_vertex_cover(G, weight=None): r"""Returns an approximate minimum weighted vertex cover. diff --git a/networkx/algorithms/assortativity/connectivity.py b/networkx/algorithms/assortativity/connectivity.py index bd433ded595..c3fde0da68a 100644 --- a/networkx/algorithms/assortativity/connectivity.py +++ b/networkx/algorithms/assortativity/connectivity.py @@ -5,7 +5,7 @@ __all__ = ["average_degree_connectivity"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_degree_connectivity( G, source="in+out", target="in+out", nodes=None, weight=None ): diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py index d517132ff21..c62e92f9c2e 100644 --- a/networkx/algorithms/assortativity/correlation.py +++ b/networkx/algorithms/assortativity/correlation.py @@ -15,7 +15,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. 
@@ -100,7 +100,7 @@ def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None return _numeric_ac(M, mapping=mapping) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. @@ -159,7 +159,7 @@ def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, node return sp.stats.pearsonr(x, y)[0] -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def attribute_assortativity_coefficient(G, attribute, nodes=None): """Compute assortativity for node attributes. @@ -206,7 +206,7 @@ def attribute_assortativity_coefficient(G, attribute, nodes=None): return attribute_ac(M) -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def numeric_assortativity_coefficient(G, attribute, nodes=None): """Compute assortativity for numerical node attributes. diff --git a/networkx/algorithms/assortativity/mixing.py b/networkx/algorithms/assortativity/mixing.py index 66b98797e69..577f5da56a9 100644 --- a/networkx/algorithms/assortativity/mixing.py +++ b/networkx/algorithms/assortativity/mixing.py @@ -14,7 +14,7 @@ ] -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): """Returns dictionary representation of mixing matrix for attribute. @@ -53,7 +53,7 @@ def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): return mixing_dict(xy_iter, normalized=normalized) -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True): """Returns mixing matrix for attribute. 
@@ -113,7 +113,7 @@ def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=T return a -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False): """Returns dictionary representation of mixing matrix for degree. @@ -145,7 +145,7 @@ def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=F return mixing_dict(xy_iter, normalized=normalized) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_mixing_matrix( G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None ): diff --git a/networkx/algorithms/assortativity/neighbor_degree.py b/networkx/algorithms/assortativity/neighbor_degree.py index a8980da766f..6488d041a8b 100644 --- a/networkx/algorithms/assortativity/neighbor_degree.py +++ b/networkx/algorithms/assortativity/neighbor_degree.py @@ -3,7 +3,7 @@ __all__ = ["average_neighbor_degree"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None): r"""Returns the average degree of the neighborhood of each node. diff --git a/networkx/algorithms/assortativity/pairs.py b/networkx/algorithms/assortativity/pairs.py index a3580d40324..5a1d6f8e1df 100644 --- a/networkx/algorithms/assortativity/pairs.py +++ b/networkx/algorithms/assortativity/pairs.py @@ -4,7 +4,7 @@ __all__ = ["node_attribute_xy", "node_degree_xy"] -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def node_attribute_xy(G, attribute, nodes=None): """Returns iterator of node-attribute pairs for all edges in G. 
@@ -59,7 +59,7 @@ def node_attribute_xy(G, attribute, nodes=None): yield (uattr, vattr) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def node_degree_xy(G, x="out", y="in", weight=None, nodes=None): """Generate node degree-degree pairs for edges in G. diff --git a/networkx/algorithms/asteroidal.py b/networkx/algorithms/asteroidal.py index 65355fe6253..41e91390dff 100644 --- a/networkx/algorithms/asteroidal.py +++ b/networkx/algorithms/asteroidal.py @@ -18,7 +18,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def find_asteroidal_triple(G): r"""Find an asteroidal triple in the given graph. @@ -91,7 +91,7 @@ def find_asteroidal_triple(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_at_free(G): """Check if a graph is AT-free. @@ -125,7 +125,7 @@ def is_at_free(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def create_component_structure(G): r"""Create component structure for G. diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py index 8b9120e27aa..d0a63a10fd1 100644 --- a/networkx/algorithms/bipartite/basic.py +++ b/networkx/algorithms/bipartite/basic.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def color(G): """Returns a two-coloring of the graph. @@ -83,7 +83,7 @@ def neighbors(v): return color -@nx._dispatch +@nx._dispatchable def is_bipartite(G): """Returns True if graph G is bipartite, False if not. @@ -109,7 +109,7 @@ def is_bipartite(G): return False -@nx._dispatch +@nx._dispatchable def is_bipartite_node_set(G, nodes): """Returns True if nodes and G/nodes are a bipartition of G. @@ -154,7 +154,7 @@ def is_bipartite_node_set(G, nodes): return True -@nx._dispatch +@nx._dispatchable def sets(G, top_nodes=None): """Returns bipartite node sets of graph G. 
@@ -221,7 +221,7 @@ def sets(G, top_nodes=None): return (X, Y) -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B") def density(B, nodes): """Returns density of bipartite graph B. @@ -274,7 +274,7 @@ def density(B, nodes): return d -@nx._dispatch(graphs="B", edge_attrs="weight") +@nx._dispatchable(graphs="B", edge_attrs="weight") def degrees(B, nodes, weight=None): """Returns the degrees of the two node sets in the bipartite graph B. diff --git a/networkx/algorithms/bipartite/centrality.py b/networkx/algorithms/bipartite/centrality.py index a904da3528f..42d7270ee7d 100644 --- a/networkx/algorithms/bipartite/centrality.py +++ b/networkx/algorithms/bipartite/centrality.py @@ -3,7 +3,7 @@ __all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"] -@nx._dispatch(name="bipartite_degree_centrality") +@nx._dispatchable(name="bipartite_degree_centrality") def degree_centrality(G, nodes): r"""Compute the degree centrality for nodes in a bipartite network. @@ -78,7 +78,7 @@ def degree_centrality(G, nodes): return centrality -@nx._dispatch(name="bipartite_betweenness_centrality") +@nx._dispatchable(name="bipartite_betweenness_centrality") def betweenness_centrality(G, nodes): r"""Compute betweenness centrality for nodes in a bipartite network. @@ -182,7 +182,7 @@ def betweenness_centrality(G, nodes): return betweenness -@nx._dispatch(name="bipartite_closeness_centrality") +@nx._dispatchable(name="bipartite_closeness_centrality") def closeness_centrality(G, nodes, normalized=True): r"""Compute the closeness centrality for nodes in a bipartite network. 
diff --git a/networkx/algorithms/bipartite/cluster.py b/networkx/algorithms/bipartite/cluster.py index f10d7efd117..d9611527759 100644 --- a/networkx/algorithms/bipartite/cluster.py +++ b/networkx/algorithms/bipartite/cluster.py @@ -29,7 +29,7 @@ def cc_min(nu, nv): modes = {"dot": cc_dot, "min": cc_min, "max": cc_max} -@nx._dispatch +@nx._dispatchable def latapy_clustering(G, nodes=None, mode="dot"): r"""Compute a bipartite clustering coefficient for nodes. @@ -134,7 +134,7 @@ def latapy_clustering(G, nodes=None, mode="dot"): clustering = latapy_clustering -@nx._dispatch(name="bipartite_average_clustering") +@nx._dispatchable(name="bipartite_average_clustering") def average_clustering(G, nodes=None, mode="dot"): r"""Compute the average bipartite clustering coefficient. @@ -211,7 +211,7 @@ def average_clustering(G, nodes=None, mode="dot"): return sum(ccs[v] for v in nodes) / len(nodes) -@nx._dispatch +@nx._dispatchable def robins_alexander_clustering(G): r"""Compute the bipartite clustering of G. diff --git a/networkx/algorithms/bipartite/covering.py b/networkx/algorithms/bipartite/covering.py index 8669b4b1681..720c63ac40c 100644 --- a/networkx/algorithms/bipartite/covering.py +++ b/networkx/algorithms/bipartite/covering.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(name="bipartite_min_edge_cover") +@nx._dispatchable(name="bipartite_min_edge_cover") def min_edge_cover(G, matching_algorithm=None): """Returns a set of edges which constitutes the minimum edge cover of the graph. 
diff --git a/networkx/algorithms/bipartite/edgelist.py b/networkx/algorithms/bipartite/edgelist.py index 90c449a3a14..b9b2a82e05a 100644 --- a/networkx/algorithms/bipartite/edgelist.py +++ b/networkx/algorithms/bipartite/edgelist.py @@ -146,7 +146,7 @@ def generate_edgelist(G, delimiter=" ", data=True): yield delimiter.join(map(str, edge)) -@nx._dispatch(name="bipartite_parse_edgelist", graphs=None) +@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None) def parse_edgelist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True ): @@ -268,7 +268,7 @@ def parse_edgelist( @open_file(0, mode="rb") -@nx._dispatch(name="bipartite_read_edgelist", graphs=None) +@nx._dispatchable(name="bipartite_read_edgelist", graphs=None) def read_edgelist( path, comments="#", diff --git a/networkx/algorithms/bipartite/generators.py b/networkx/algorithms/bipartite/generators.py index 9c8bfc0ef29..2774c6fd409 100644 --- a/networkx/algorithms/bipartite/generators.py +++ b/networkx/algorithms/bipartite/generators.py @@ -20,7 +20,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number([0, 1]) def complete_bipartite_graph(n1, n2, create_using=None): """Returns the complete bipartite graph `K_{n_1,n_2}`. @@ -67,7 +67,7 @@ def complete_bipartite_graph(n1, n2, create_using=None): @py_random_state(3) -@nx._dispatch(name="bipartite_configuration_model", graphs=None) +@nx._dispatchable(name="bipartite_configuration_model", graphs=None) def configuration_model(aseq, bseq, create_using=None, seed=None): """Returns a random bipartite graph from two given degree sequences. 
@@ -138,7 +138,7 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): return G -@nx._dispatch(name="bipartite_havel_hakimi_graph", graphs=None) +@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None) def havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. @@ -213,7 +213,7 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. @@ -287,7 +287,7 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using an alternating Havel-Hakimi style construction. @@ -366,7 +366,7 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def preferential_attachment_graph(aseq, p, create_using=None, seed=None): """Create a bipartite graph with a preferential attachment model from a given single degree sequence. @@ -438,7 +438,7 @@ def preferential_attachment_graph(aseq, p, create_using=None, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_graph(n, m, p, seed=None, directed=False): """Returns a bipartite random graph. @@ -525,7 +525,7 @@ def random_graph(n, m, p, seed=None, directed=False): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gnmk_random_graph(n, m, k, seed=None, directed=False): """Returns a random bipartite graph G_{n,m,k}. 
diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py index 17d55614bcd..48149ab9e31 100644 --- a/networkx/algorithms/bipartite/matching.py +++ b/networkx/algorithms/bipartite/matching.py @@ -54,7 +54,7 @@ INFINITY = float("inf") -@nx._dispatch +@nx._dispatchable def hopcroft_karp_matching(G, top_nodes=None): """Returns the maximum cardinality matching of the bipartite graph `G`. @@ -181,7 +181,7 @@ def depth_first_search(v): return dict(itertools.chain(leftmatches.items(), rightmatches.items())) -@nx._dispatch +@nx._dispatchable def eppstein_matching(G, top_nodes=None): """Returns the maximum cardinality matching of the bipartite graph `G`. @@ -420,7 +420,7 @@ def _connected_by_alternating_paths(G, matching, targets): } -@nx._dispatch +@nx._dispatchable def to_vertex_cover(G, matching, top_nodes=None): """Returns the minimum vertex cover corresponding to the given maximum matching of the bipartite graph `G`. @@ -501,7 +501,7 @@ def to_vertex_cover(G, matching, top_nodes=None): maximum_matching = hopcroft_karp_matching -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def minimum_weight_full_matching(G, top_nodes=None, weight="weight"): r"""Returns a minimum weight full matching of the bipartite graph `G`. 
diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py index e5679677574..8809e21b62e 100644 --- a/networkx/algorithms/bipartite/matrix.py +++ b/networkx/algorithms/bipartite/matrix.py @@ -11,7 +11,7 @@ __all__ = ["biadjacency_matrix", "from_biadjacency_matrix"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def biadjacency_matrix( G, row_order, column_order=None, dtype=None, weight="weight", format="csr" ): @@ -110,7 +110,7 @@ def biadjacency_matrix( raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"): r"""Creates a new bipartite graph from a biadjacency matrix given as a SciPy sparse array. diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py index 57f960e13b3..e0afe58a870 100644 --- a/networkx/algorithms/bipartite/projection.py +++ b/networkx/algorithms/bipartite/projection.py @@ -12,7 +12,7 @@ ] -@nx._dispatch(graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True) +@nx._dispatchable(graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True) def projected_graph(B, nodes, multigraph=False): r"""Returns the projection of B onto one of its node sets. @@ -117,7 +117,7 @@ def projected_graph(B, nodes, multigraph=False): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B") def weighted_projected_graph(B, nodes, ratio=False): r"""Returns a weighted projection of B onto one of its node sets. @@ -218,7 +218,7 @@ def weighted_projected_graph(B, nodes, ratio=False): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B") def collaboration_weighted_projected_graph(B, nodes): r"""Newman's weighted projection of B onto one of its node sets. 
@@ -313,7 +313,7 @@ def collaboration_weighted_projected_graph(B, nodes): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B") def overlap_weighted_projected_graph(B, nodes, jaccard=True): r"""Overlap weighted projection of B onto one of its node sets. @@ -413,7 +413,7 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B", preserve_all_attrs=True) +@nx._dispatchable(graphs="B", preserve_all_attrs=True) def generic_weighted_projected_graph(B, nodes, weight_function=None): r"""Weighted projection of B with a user-specified weight function. diff --git a/networkx/algorithms/bipartite/redundancy.py b/networkx/algorithms/bipartite/redundancy.py index 04b3ae9ca75..7a44d212896 100644 --- a/networkx/algorithms/bipartite/redundancy.py +++ b/networkx/algorithms/bipartite/redundancy.py @@ -7,7 +7,7 @@ __all__ = ["node_redundancy"] -@nx._dispatch +@nx._dispatchable def node_redundancy(G, nodes=None): r"""Computes the node redundancy coefficients for the nodes in the bipartite graph `G`. diff --git a/networkx/algorithms/bipartite/spectral.py b/networkx/algorithms/bipartite/spectral.py index f4b414243ac..0ecd283775f 100644 --- a/networkx/algorithms/bipartite/spectral.py +++ b/networkx/algorithms/bipartite/spectral.py @@ -6,7 +6,7 @@ __all__ = ["spectral_bipartivity"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def spectral_bipartivity(G, nodes=None, weight="weight"): """Returns the spectral bipartivity. 
diff --git a/networkx/algorithms/boundary.py b/networkx/algorithms/boundary.py index ea97cee6efb..fef9ba22369 100644 --- a/networkx/algorithms/boundary.py +++ b/networkx/algorithms/boundary.py @@ -15,7 +15,7 @@ __all__ = ["edge_boundary", "node_boundary"] -@nx._dispatch(edge_attrs={"data": "default"}, preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data") def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): """Returns the edge boundary of `nbunch1`. @@ -106,7 +106,7 @@ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None ) -@nx._dispatch +@nx._dispatchable def node_boundary(G, nbunch1, nbunch2=None): """Returns the node boundary of `nbunch1`. diff --git a/networkx/algorithms/bridges.py b/networkx/algorithms/bridges.py index 106120e2fde..e076a256cb8 100644 --- a/networkx/algorithms/bridges.py +++ b/networkx/algorithms/bridges.py @@ -8,7 +8,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def bridges(G, root=None): """Generate all bridges in a graph. @@ -81,7 +81,7 @@ def bridges(G, root=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def has_bridges(G, root=None): """Decide whether a graph has any bridges. 
@@ -142,7 +142,7 @@ def has_bridges(G, root=None): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def local_bridges(G, with_span=True, weight=None): """Iterate over local bridges of `G` optionally computing the span diff --git a/networkx/algorithms/centrality/betweenness.py b/networkx/algorithms/centrality/betweenness.py index b4b1f3963b0..4f44fb19ba0 100644 --- a/networkx/algorithms/centrality/betweenness.py +++ b/networkx/algorithms/centrality/betweenness.py @@ -12,7 +12,7 @@ @py_random_state(5) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def betweenness_centrality( G, k=None, normalized=True, weight=None, endpoints=False, seed=None ): @@ -154,7 +154,7 @@ def betweenness_centrality( @py_random_state(4) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None): r"""Compute betweenness centrality for edges. diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py index e6c1acdf4ff..7f9967e964c 100644 --- a/networkx/algorithms/centrality/betweenness_subset.py +++ b/networkx/algorithms/centrality/betweenness_subset.py @@ -16,7 +16,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None): r"""Compute betweenness centrality for a subset of nodes. 
@@ -114,7 +114,7 @@ def betweenness_centrality_subset(G, sources, targets, normalized=False, weight= return b -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_betweenness_centrality_subset( G, sources, targets, normalized=False, weight=None ): diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py index 6a95ac14ef8..3527c468f33 100644 --- a/networkx/algorithms/centrality/closeness.py +++ b/networkx/algorithms/centrality/closeness.py @@ -10,7 +10,7 @@ __all__ = ["closeness_centrality", "incremental_closeness_centrality"] -@nx._dispatch(edge_attrs="distance") +@nx._dispatchable(edge_attrs="distance") def closeness_centrality(G, u=None, distance=None, wf_improved=True): r"""Compute closeness centrality for nodes. @@ -137,7 +137,7 @@ def closeness_centrality(G, u=None, distance=None, wf_improved=True): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def incremental_closeness_centrality( G, edge, prev_cc=None, insertion=True, wf_improved=True ): diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py index ea1b2c8f2f4..9e68676b6a0 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness.py +++ b/networkx/algorithms/centrality/current_flow_betweenness.py @@ -21,7 +21,7 @@ @not_implemented_for("directed") @py_random_state(7) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def approximate_current_flow_betweenness_centrality( G, normalized=True, @@ -144,7 +144,7 @@ def approximate_current_flow_betweenness_centrality( @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def current_flow_betweenness_centrality( G, normalized=True, weight=None, dtype=float, solver="full" ): @@ -241,7 +241,7 @@ def current_flow_betweenness_centrality( @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") 
+@nx._dispatchable(edge_attrs="weight") def edge_current_flow_betweenness_centrality( G, normalized=True, weight=None, dtype=float, solver="full" ): diff --git a/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/networkx/algorithms/centrality/current_flow_betweenness_subset.py index debfca27f55..38e744a835a 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness_subset.py +++ b/networkx/algorithms/centrality/current_flow_betweenness_subset.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def current_flow_betweenness_centrality_subset( G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" ): @@ -120,7 +120,7 @@ def current_flow_betweenness_centrality_subset( @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_current_flow_betweenness_centrality_subset( G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" ): diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py index daefbae902b..c4c8dd56c3e 100644 --- a/networkx/algorithms/centrality/current_flow_closeness.py +++ b/networkx/algorithms/centrality/current_flow_closeness.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"): """Compute current-flow closeness centrality for nodes. 
diff --git a/networkx/algorithms/centrality/degree_alg.py b/networkx/algorithms/centrality/degree_alg.py index 2631730dbc0..ea53f41ea3e 100644 --- a/networkx/algorithms/centrality/degree_alg.py +++ b/networkx/algorithms/centrality/degree_alg.py @@ -5,7 +5,7 @@ __all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"] -@nx._dispatch +@nx._dispatchable def degree_centrality(G): """Compute the degree centrality for nodes. @@ -50,7 +50,7 @@ def degree_centrality(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def in_degree_centrality(G): """Compute the in-degree centrality for nodes. @@ -100,7 +100,7 @@ def in_degree_centrality(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def out_degree_centrality(G): """Compute the out-degree centrality for nodes. diff --git a/networkx/algorithms/centrality/dispersion.py b/networkx/algorithms/centrality/dispersion.py index a551c387d88..a3fa68583a9 100644 --- a/networkx/algorithms/centrality/dispersion.py +++ b/networkx/algorithms/centrality/dispersion.py @@ -5,7 +5,7 @@ __all__ = ["dispersion"] -@nx._dispatch +@nx._dispatchable def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): r"""Calculate dispersion between `u` and `v` in `G`. diff --git a/networkx/algorithms/centrality/eigenvector.py b/networkx/algorithms/centrality/eigenvector.py index 267e7b51027..f7fcbf78060 100644 --- a/networkx/algorithms/centrality/eigenvector.py +++ b/networkx/algorithms/centrality/eigenvector.py @@ -8,7 +8,7 @@ @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None): r"""Compute the eigenvector centrality for the graph G. 
@@ -193,7 +193,7 @@ def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None raise nx.PowerIterationFailedConvergence(max_iter) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): r"""Compute the eigenvector centrality for the graph G. diff --git a/networkx/algorithms/centrality/flow_matrix.py b/networkx/algorithms/centrality/flow_matrix.py index e9cd7e26016..3874f6b2ffe 100644 --- a/networkx/algorithms/centrality/flow_matrix.py +++ b/networkx/algorithms/centrality/flow_matrix.py @@ -3,7 +3,7 @@ import networkx as nx -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def flow_matrix_row(G, weight=None, dtype=float, solver="lu"): # Generate a row of the current-flow matrix import numpy as np diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py index ff17d8f32ed..5819c357d03 100644 --- a/networkx/algorithms/centrality/group.py +++ b/networkx/algorithms/centrality/group.py @@ -19,7 +19,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False): r"""Compute the group betweenness centrality for a group of nodes. @@ -236,7 +236,7 @@ def _group_preprocessing(G, set_v, weight): return PB, sigma, D -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def prominent_group( G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False ): @@ -543,7 +543,7 @@ def _heuristic(k, root, DF_tree, D, nodes, greedy): return node_p, node_m, DF_tree -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def group_closeness_centrality(G, S, weight=None): r"""Compute the group closeness centrality for a group of nodes. 
@@ -640,7 +640,7 @@ def group_closeness_centrality(G, S, weight=None): return closeness -@nx._dispatch +@nx._dispatchable def group_degree_centrality(G, S): """Compute the group degree centrality for a group of nodes. @@ -692,7 +692,7 @@ def group_degree_centrality(G, S): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def group_in_degree_centrality(G, S): """Compute the group in-degree centrality for a group of nodes. @@ -739,7 +739,7 @@ def group_in_degree_centrality(G, S): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def group_out_degree_centrality(G, S): """Compute the group out-degree centrality for a group of nodes. diff --git a/networkx/algorithms/centrality/harmonic.py b/networkx/algorithms/centrality/harmonic.py index 86b5020f96c..9cd9f7f0839 100644 --- a/networkx/algorithms/centrality/harmonic.py +++ b/networkx/algorithms/centrality/harmonic.py @@ -6,7 +6,7 @@ __all__ = ["harmonic_centrality"] -@nx._dispatch(edge_attrs="distance") +@nx._dispatchable(edge_attrs="distance") def harmonic_centrality(G, nbunch=None, distance=None, sources=None): r"""Compute harmonic centrality for nodes. diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py index 543fd879b97..527ec622fc4 100644 --- a/networkx/algorithms/centrality/katz.py +++ b/networkx/algorithms/centrality/katz.py @@ -8,7 +8,7 @@ @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def katz_centrality( G, alpha=0.1, @@ -194,7 +194,7 @@ def katz_centrality( @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): r"""Compute the Katz centrality for the graph G. 
diff --git a/networkx/algorithms/centrality/laplacian.py b/networkx/algorithms/centrality/laplacian.py index d8a58e6b322..92ad463884b 100644 --- a/networkx/algorithms/centrality/laplacian.py +++ b/networkx/algorithms/centrality/laplacian.py @@ -6,7 +6,7 @@ __all__ = ["laplacian_centrality"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def laplacian_centrality( G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95 ): diff --git a/networkx/algorithms/centrality/load.py b/networkx/algorithms/centrality/load.py index 9a81cc43282..50bc6210b31 100644 --- a/networkx/algorithms/centrality/load.py +++ b/networkx/algorithms/centrality/load.py @@ -6,7 +6,7 @@ __all__ = ["load_centrality", "edge_load_centrality"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None): """Compute load centrality for nodes. @@ -136,7 +136,7 @@ def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None): load_centrality = newman_betweenness_centrality -@nx._dispatch +@nx._dispatchable def edge_load_centrality(G, cutoff=False): """Compute edge load. diff --git a/networkx/algorithms/centrality/percolation.py b/networkx/algorithms/centrality/percolation.py index cc5d5ce6d7d..0d4c87132b4 100644 --- a/networkx/algorithms/centrality/percolation.py +++ b/networkx/algorithms/centrality/percolation.py @@ -11,7 +11,7 @@ __all__ = ["percolation_centrality"] -@nx._dispatch(node_attrs="attribute", edge_attrs="weight") +@nx._dispatchable(node_attrs="attribute", edge_attrs="weight") def percolation_centrality(G, attribute="percolation", states=None, weight=None): r"""Compute the percolation centrality for nodes. 
diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py index e93c8e6d8cd..63ecc216eae 100644 --- a/networkx/algorithms/centrality/reaching.py +++ b/networkx/algorithms/centrality/reaching.py @@ -31,7 +31,7 @@ def _average_weight(G, path, weight=None): return total_weight / path_length -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def global_reaching_centrality(G, weight=None, normalized=True): """Returns the global reaching centrality of a directed graph. @@ -119,7 +119,7 @@ def as_distance(u, v, d): return sum(max_lrc - c for c in lrc) / (len(G) - 1) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True): """Returns the local reaching centrality of a node in a directed graph. diff --git a/networkx/algorithms/centrality/second_order.py b/networkx/algorithms/centrality/second_order.py index 4bdb1f52141..b08fe66b71f 100644 --- a/networkx/algorithms/centrality/second_order.py +++ b/networkx/algorithms/centrality/second_order.py @@ -39,7 +39,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def second_order_centrality(G, weight="weight"): """Compute the second order centrality for nodes of G. diff --git a/networkx/algorithms/centrality/subgraph_alg.py b/networkx/algorithms/centrality/subgraph_alg.py index c615b489201..3234e854b3c 100644 --- a/networkx/algorithms/centrality/subgraph_alg.py +++ b/networkx/algorithms/centrality/subgraph_alg.py @@ -14,7 +14,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def subgraph_centrality_exp(G): r"""Returns the subgraph centrality for each node of G. 
@@ -98,7 +98,7 @@ def subgraph_centrality_exp(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def subgraph_centrality(G): r"""Returns subgraph centrality for each node in G. @@ -189,7 +189,7 @@ def subgraph_centrality(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def communicability_betweenness_centrality(G): r"""Returns subgraph communicability for all pairs of nodes in G. @@ -291,7 +291,7 @@ def communicability_betweenness_centrality(G): return cbc -@nx._dispatch +@nx._dispatchable def estrada_index(G): r"""Returns the Estrada index of a the graph G. diff --git a/networkx/algorithms/centrality/trophic.py b/networkx/algorithms/centrality/trophic.py index cfc7ea4f206..d6dcca1526f 100644 --- a/networkx/algorithms/centrality/trophic.py +++ b/networkx/algorithms/centrality/trophic.py @@ -6,7 +6,7 @@ @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def trophic_levels(G, weight="weight"): r"""Compute the trophic levels of nodes. @@ -82,7 +82,7 @@ def trophic_levels(G, weight="weight"): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def trophic_differences(G, weight="weight"): r"""Compute the trophic differences of the edges of a directed graph. @@ -117,7 +117,7 @@ def trophic_differences(G, weight="weight"): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def trophic_incoherence_parameter(G, weight="weight", cannibalism=False): r"""Compute the trophic incoherence parameter of a graph. 
diff --git a/networkx/algorithms/centrality/voterank_alg.py b/networkx/algorithms/centrality/voterank_alg.py index ca6bd913479..063dfdd64b3 100644 --- a/networkx/algorithms/centrality/voterank_alg.py +++ b/networkx/algorithms/centrality/voterank_alg.py @@ -4,7 +4,7 @@ __all__ = ["voterank"] -@nx._dispatch +@nx._dispatchable def voterank(G, number_of_nodes=None): """Select a list of influential nodes in a graph using VoteRank algorithm diff --git a/networkx/algorithms/chains.py b/networkx/algorithms/chains.py index 289bc1c3dd4..ae342d9c866 100644 --- a/networkx/algorithms/chains.py +++ b/networkx/algorithms/chains.py @@ -8,7 +8,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def chain_decomposition(G, root=None): """Returns the chain decomposition of a graph. diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py index 0793761027a..875bf24d14e 100644 --- a/networkx/algorithms/chordal.py +++ b/networkx/algorithms/chordal.py @@ -28,7 +28,7 @@ class NetworkXTreewidthBoundExceeded(nx.NetworkXException): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_chordal(G): """Checks whether G is a chordal graph. @@ -88,7 +88,7 @@ def is_chordal(G): return len(_find_chordality_breaker(G)) == 0 -@nx._dispatch +@nx._dispatchable def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): """Returns the set of induced nodes in the path from s to t. @@ -168,7 +168,7 @@ def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): return induced_nodes -@nx._dispatch +@nx._dispatchable def chordal_graph_cliques(G): """Returns all maximal cliques of a chordal graph. @@ -241,7 +241,7 @@ def chordal_graph_cliques(G): yield frozenset(clique_wanna_be) -@nx._dispatch +@nx._dispatchable def chordal_graph_treewidth(G): """Returns the treewidth of the chordal graph G. 
@@ -369,7 +369,7 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def complete_to_chordal_graph(G): """Return a copy of G completed to a chordal graph diff --git a/networkx/algorithms/clique.py b/networkx/algorithms/clique.py index 7fd7e81665a..cb7210854e0 100644 --- a/networkx/algorithms/clique.py +++ b/networkx/algorithms/clique.py @@ -26,7 +26,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def enumerate_all_cliques(G): """Returns all cliques in an undirected graph. @@ -98,7 +98,7 @@ def enumerate_all_cliques(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def find_cliques(G, nodes=None): """Returns all maximal cliques in an undirected graph. @@ -294,7 +294,7 @@ def find_cliques(G, nodes=None): # TODO Should this also be not implemented for directed graphs? -@nx._dispatch +@nx._dispatchable def find_cliques_recursive(G, nodes=None): """Returns all maximal cliques in a graph. @@ -412,7 +412,7 @@ def expand(subg, cand): return expand(subg_init, cand_init) -@nx._dispatch +@nx._dispatchable def make_max_clique_graph(G, create_using=None): """Returns the maximal clique graph of the given graph. @@ -459,7 +459,7 @@ def make_max_clique_graph(G, create_using=None): return B -@nx._dispatch +@nx._dispatchable def make_clique_bipartite(G, fpos=None, create_using=None, name=None): """Returns the bipartite clique graph corresponding to `G`. @@ -508,7 +508,7 @@ def make_clique_bipartite(G, fpos=None, create_using=None, name=None): return B -@nx._dispatch +@nx._dispatchable def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False): """Returns the size of the largest maximal clique containing each given node. 
@@ -698,7 +698,7 @@ def find_max_weight_clique(self): @not_implemented_for("directed") -@nx._dispatch(node_attrs="weight") +@nx._dispatchable(node_attrs="weight") def max_weight_clique(G, weight="weight"): """Find a maximum weight clique in G. diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 4297d5c2862..35c86daeb58 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -17,7 +17,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def triangles(G, nodes=None): """Compute the number of triangles. @@ -247,7 +247,7 @@ def wt(u, v): yield (i, dtotal, dbidirectional, directed_triangles) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_clustering(G, nodes=None, weight=None, count_zeros=True): r"""Compute the average clustering coefficient for the graph G. @@ -307,7 +307,7 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True): return sum(c) / len(c) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def clustering(G, nodes=None, weight=None): r"""Compute the clustering coefficient for nodes. @@ -422,7 +422,7 @@ def clustering(G, nodes=None, weight=None): return clusterc -@nx._dispatch +@nx._dispatchable def transitivity(G): r"""Compute graph transitivity, the fraction of all possible triangles present in G. @@ -461,7 +461,7 @@ def transitivity(G): return 0 if triangles == 0 else triangles / contri -@nx._dispatch +@nx._dispatchable def square_clustering(G, nodes=None): r"""Compute the squares clustering coefficient for nodes. @@ -540,7 +540,7 @@ def square_clustering(G, nodes=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def generalized_degree(G, nodes=None): r"""Compute the generalized degree for nodes. 
diff --git a/networkx/algorithms/coloring/equitable_coloring.py b/networkx/algorithms/coloring/equitable_coloring.py index af1fb5a7e7c..659609664c1 100644 --- a/networkx/algorithms/coloring/equitable_coloring.py +++ b/networkx/algorithms/coloring/equitable_coloring.py @@ -9,14 +9,14 @@ __all__ = ["equitable_color"] -@nx._dispatch +@nx._dispatchable def is_coloring(G, coloring): """Determine if the coloring is a valid coloring for the graph G.""" # Verify that the coloring is valid. return all(coloring[s] != coloring[d] for s, d in G.edges) -@nx._dispatch +@nx._dispatchable def is_equitable(G, coloring, num_colors=None): """Determines if the coloring is valid and equitable for the graph G.""" @@ -112,7 +112,7 @@ def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L): X = Y -@nx._dispatch +@nx._dispatchable def pad_graph(G, num_colors): """Add a disconnected complete clique K_p such that the number of nodes in the graph becomes a multiple of `num_colors`. @@ -386,7 +386,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): break -@nx._dispatch +@nx._dispatchable def equitable_color(G, num_colors): """Provides an equitable coloring for nodes of `G`. diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py index efc70f30eb8..661f14e0149 100644 --- a/networkx/algorithms/coloring/greedy_coloring.py +++ b/networkx/algorithms/coloring/greedy_coloring.py @@ -20,7 +20,7 @@ ] -@nx._dispatch +@nx._dispatchable def strategy_largest_first(G, colors): """Returns a list of the nodes of ``G`` in decreasing order by degree. @@ -32,7 +32,7 @@ def strategy_largest_first(G, colors): @py_random_state(2) -@nx._dispatch +@nx._dispatchable def strategy_random_sequential(G, colors, seed=None): """Returns a random permutation of the nodes of ``G`` as a list. 
@@ -47,7 +47,7 @@ def strategy_random_sequential(G, colors, seed=None): return nodes -@nx._dispatch +@nx._dispatchable def strategy_smallest_last(G, colors): """Returns a deque of the nodes of ``G``, "smallest" last. @@ -121,7 +121,7 @@ def _maximal_independent_set(G): return result -@nx._dispatch +@nx._dispatchable def strategy_independent_set(G, colors): """Uses a greedy independent set removal strategy to determine the colors. @@ -146,7 +146,7 @@ def strategy_independent_set(G, colors): yield from nodes -@nx._dispatch +@nx._dispatchable def strategy_connected_sequential_bfs(G, colors): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first traversal. @@ -160,7 +160,7 @@ def strategy_connected_sequential_bfs(G, colors): return strategy_connected_sequential(G, colors, "bfs") -@nx._dispatch +@nx._dispatchable def strategy_connected_sequential_dfs(G, colors): """Returns an iterable over nodes in ``G`` in the order given by a depth-first traversal. @@ -174,7 +174,7 @@ def strategy_connected_sequential_dfs(G, colors): return strategy_connected_sequential(G, colors, "dfs") -@nx._dispatch +@nx._dispatchable def strategy_connected_sequential(G, colors, traversal="bfs"): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first or depth-first traversal. @@ -207,7 +207,7 @@ def strategy_connected_sequential(G, colors, traversal="bfs"): yield end -@nx._dispatch +@nx._dispatchable def strategy_saturation_largest_first(G, colors): """Iterates over all the nodes of ``G`` in "saturation order" (also known as "DSATUR"). @@ -269,7 +269,7 @@ def strategy_saturation_largest_first(G, colors): } -@nx._dispatch +@nx._dispatchable def greedy_color(G, strategy="largest_first", interchange=False): """Color a graph using various strategies of greedy graph coloring. 
diff --git a/networkx/algorithms/communicability_alg.py b/networkx/algorithms/communicability_alg.py index c9144a7b84f..07316dc3ae2 100644 --- a/networkx/algorithms/communicability_alg.py +++ b/networkx/algorithms/communicability_alg.py @@ -9,7 +9,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def communicability(G): r"""Returns communicability between all pairs of nodes in G. @@ -91,7 +91,7 @@ def communicability(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def communicability_exp(G): r"""Returns communicability between all pairs of nodes in G. diff --git a/networkx/algorithms/community/asyn_fluid.py b/networkx/algorithms/community/asyn_fluid.py index 490bb046e7a..fea72c1bfdb 100644 --- a/networkx/algorithms/community/asyn_fluid.py +++ b/networkx/algorithms/community/asyn_fluid.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def asyn_fluidc(G, k, max_iter=100, seed=None): """Returns communities in `G` as detected by Fluid Communities algorithm. diff --git a/networkx/algorithms/community/centrality.py b/networkx/algorithms/community/centrality.py index efdc98460e4..43281701d2b 100644 --- a/networkx/algorithms/community/centrality.py +++ b/networkx/algorithms/community/centrality.py @@ -5,7 +5,7 @@ __all__ = ["girvan_newman"] -@nx._dispatch(preserve_edge_attrs="most_valuable_edge") +@nx._dispatchable(preserve_edge_attrs="most_valuable_edge") def girvan_newman(G, most_valuable_edge=None): """Finds communities in a graph using the Girvan–Newman method. 
diff --git a/networkx/algorithms/community/community_utils.py b/networkx/algorithms/community/community_utils.py index 5e4727eec42..b57cd9881cb 100644 --- a/networkx/algorithms/community/community_utils.py +++ b/networkx/algorithms/community/community_utils.py @@ -4,7 +4,7 @@ __all__ = ["is_partition"] -@nx._dispatch +@nx._dispatchable def is_partition(G, communities): """Returns *True* if `communities` is a partition of the nodes of `G`. diff --git a/networkx/algorithms/community/kclique.py b/networkx/algorithms/community/kclique.py index 60433669cee..c7249104204 100644 --- a/networkx/algorithms/community/kclique.py +++ b/networkx/algorithms/community/kclique.py @@ -5,7 +5,7 @@ __all__ = ["k_clique_communities"] -@nx._dispatch +@nx._dispatchable def k_clique_communities(G, k, cliques=None): """Find k-clique communities in graph using the percolation method. diff --git a/networkx/algorithms/community/kernighan_lin.py b/networkx/algorithms/community/kernighan_lin.py index a18c7779b5b..f6397d82be6 100644 --- a/networkx/algorithms/community/kernighan_lin.py +++ b/networkx/algorithms/community/kernighan_lin.py @@ -42,7 +42,7 @@ def _update_costs(costs_x, x): @not_implemented_for("directed") @py_random_state(4) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None): """Partition a graph into two blocks using the Kernighan–Lin algorithm. diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py index 307dfbe3a42..8690855766b 100644 --- a/networkx/algorithms/community/label_propagation.py +++ b/networkx/algorithms/community/label_propagation.py @@ -14,7 +14,7 @@ @py_random_state("seed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def fast_label_propagation_communities(G, *, weight=None, seed=None): """Returns communities in `G` as detected by fast label propagation. 
@@ -137,7 +137,7 @@ def _fast_label_count(G, comms, node, weight=None): @py_random_state(2) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def asyn_lpa_communities(G, weight=None, seed=None): """Returns communities in `G` as detected by asynchronous label propagation. @@ -233,7 +233,7 @@ def asyn_lpa_communities(G, weight=None, seed=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def label_propagation_communities(G): """Generates community sets determined by label propagation diff --git a/networkx/algorithms/community/louvain.py b/networkx/algorithms/community/louvain.py index 6aa783d3152..0c880532b9d 100644 --- a/networkx/algorithms/community/louvain.py +++ b/networkx/algorithms/community/louvain.py @@ -11,7 +11,7 @@ @py_random_state("seed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def louvain_communities( G, weight="weight", resolution=1, threshold=0.0000001, seed=None ): @@ -121,7 +121,7 @@ def louvain_communities( @py_random_state("seed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def louvain_partitions( G, weight="weight", resolution=1, threshold=0.0000001, seed=None ): diff --git a/networkx/algorithms/community/lukes.py b/networkx/algorithms/community/lukes.py index 600a4db63d6..389fb51ca63 100644 --- a/networkx/algorithms/community/lukes.py +++ b/networkx/algorithms/community/lukes.py @@ -25,7 +25,7 @@ def _split_n_from(n, min_size_of_first_part): yield p1, n - p1 -@nx._dispatch(node_attrs="node_weight", edge_attrs="edge_weight") +@nx._dispatchable(node_attrs="node_weight", edge_attrs="edge_weight") def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None): """Optimal partitioning of a weighted tree using the Lukes algorithm. 
diff --git a/networkx/algorithms/community/modularity_max.py b/networkx/algorithms/community/modularity_max.py index d76116f7b2b..f465e01c6b2 100644 --- a/networkx/algorithms/community/modularity_max.py +++ b/networkx/algorithms/community/modularity_max.py @@ -223,7 +223,7 @@ def _greedy_modularity_communities_generator(G, weight=None, resolution=1): yield communities.values() -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def greedy_modularity_communities( G, weight=None, @@ -356,7 +356,7 @@ def greedy_modularity_communities( @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def naive_greedy_modularity_communities(G, resolution=1, weight=None): r"""Find communities in G using greedy modularity maximization. diff --git a/networkx/algorithms/community/quality.py b/networkx/algorithms/community/quality.py index ab86b09113a..f09a6d454af 100644 --- a/networkx/algorithms/community/quality.py +++ b/networkx/algorithms/community/quality.py @@ -58,7 +58,7 @@ def _require_partition(G, partition): require_partition = argmap(_require_partition, (0, 1)) -@nx._dispatch +@nx._dispatchable def intra_community_edges(G, partition): """Returns the number of intra-community edges for a partition of `G`. @@ -76,7 +76,7 @@ def intra_community_edges(G, partition): return sum(G.subgraph(block).size() for block in partition) -@nx._dispatch +@nx._dispatchable def inter_community_edges(G, partition): """Returns the number of inter-community edges for a partition of `G`. according to the given @@ -108,7 +108,7 @@ def inter_community_edges(G, partition): return nx.quotient_graph(G, partition, create_using=MG).size() -@nx._dispatch +@nx._dispatchable def inter_community_non_edges(G, partition): """Returns the number of inter-community non-edges according to the given partition of the nodes of `G`. 
@@ -141,7 +141,7 @@ def inter_community_non_edges(G, partition): return inter_community_edges(nx.complement(G), partition) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def modularity(G, communities, weight="weight", resolution=1): r"""Returns the modularity of the given partition of the graph. @@ -256,7 +256,7 @@ def community_contribution(community): @require_partition -@nx._dispatch +@nx._dispatchable def partition_quality(G, partition): """Returns the coverage and performance of a partition of G. diff --git a/networkx/algorithms/components/attracting.py b/networkx/algorithms/components/attracting.py index 1cc2e15615c..305c696353b 100644 --- a/networkx/algorithms/components/attracting.py +++ b/networkx/algorithms/components/attracting.py @@ -10,7 +10,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def attracting_components(G): """Generates the attracting components in `G`. @@ -54,7 +54,7 @@ def attracting_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def number_attracting_components(G): """Returns the number of attracting components in `G`. @@ -83,7 +83,7 @@ def number_attracting_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_attracting_component(G): """Returns True if `G` consists of a single attracting component. diff --git a/networkx/algorithms/components/biconnected.py b/networkx/algorithms/components/biconnected.py index 632b2d598d7..0d2f06975f8 100644 --- a/networkx/algorithms/components/biconnected.py +++ b/networkx/algorithms/components/biconnected.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_biconnected(G): """Returns True if the graph is biconnected, False otherwise. 
@@ -94,7 +94,7 @@ def is_biconnected(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def biconnected_component_edges(G): """Returns a generator of lists of edges, one list for each biconnected component of the input graph. @@ -167,7 +167,7 @@ def biconnected_component_edges(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def biconnected_components(G): """Returns a generator of sets of nodes, one set for each biconnected component of the graph @@ -260,7 +260,7 @@ def biconnected_components(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def articulation_points(G): """Yield the articulation points, or cut vertices, of a graph. diff --git a/networkx/algorithms/components/connected.py b/networkx/algorithms/components/connected.py index 8bc10980fe2..ad3e0155a7f 100644 --- a/networkx/algorithms/components/connected.py +++ b/networkx/algorithms/components/connected.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def connected_components(G): """Generate connected components. @@ -69,7 +69,7 @@ def connected_components(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def number_connected_components(G): """Returns the number of connected components. @@ -109,7 +109,7 @@ def number_connected_components(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_connected(G): """Returns True if the graph is connected, False otherwise. @@ -155,7 +155,7 @@ def is_connected(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def node_connected_component(G, n): """Returns the set of nodes in the component of graph containing node n. 
diff --git a/networkx/algorithms/components/semiconnected.py b/networkx/algorithms/components/semiconnected.py index 24a89f34d44..13cfa988a0b 100644 --- a/networkx/algorithms/components/semiconnected.py +++ b/networkx/algorithms/components/semiconnected.py @@ -6,7 +6,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_semiconnected(G): r"""Returns True if the graph is semiconnected, False otherwise. diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py index 5bf5b994766..97370425f17 100644 --- a/networkx/algorithms/components/strongly_connected.py +++ b/networkx/algorithms/components/strongly_connected.py @@ -13,7 +13,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def strongly_connected_components(G): """Generate nodes in strongly connected components of graph. @@ -112,7 +112,7 @@ def strongly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def kosaraju_strongly_connected_components(G, source=None): """Generate nodes in strongly connected components of graph. @@ -174,7 +174,7 @@ def kosaraju_strongly_connected_components(G, source=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def strongly_connected_components_recursive(G): """Generate nodes in strongly connected components of graph. @@ -256,7 +256,7 @@ def strongly_connected_components_recursive(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def number_strongly_connected_components(G): """Returns number of strongly connected components in graph. @@ -295,7 +295,7 @@ def number_strongly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_strongly_connected(G): """Test directed graph for strong connectivity. 
@@ -347,7 +347,7 @@ def is_strongly_connected(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def condensation(G, scc=None): """Returns the condensation of G. diff --git a/networkx/algorithms/components/tests/test_connected.py b/networkx/algorithms/components/tests/test_connected.py index 4c9b8d28fd5..cd08640b387 100644 --- a/networkx/algorithms/components/tests/test_connected.py +++ b/networkx/algorithms/components/tests/test_connected.py @@ -61,7 +61,7 @@ def setup_class(cls): C = [] cls.gc.append((G, C)) - # This additionally tests the @nx._dispatch mechanism, treating + # This additionally tests the @nx._dispatchable mechanism, treating # nx.connected_components as if it were a re-implementation from another package @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert]) def test_connected_components(self, wrapper): diff --git a/networkx/algorithms/components/weakly_connected.py b/networkx/algorithms/components/weakly_connected.py index c8dc2350ef1..7437fad8242 100644 --- a/networkx/algorithms/components/weakly_connected.py +++ b/networkx/algorithms/components/weakly_connected.py @@ -10,7 +10,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def weakly_connected_components(G): """Generate weakly connected components of G. @@ -66,7 +66,7 @@ def weakly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def number_weakly_connected_components(G): """Returns the number of weakly connected components in G. @@ -106,7 +106,7 @@ def number_weakly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_weakly_connected(G): """Test directed graph for weak connectivity. 
diff --git a/networkx/algorithms/connectivity/connectivity.py b/networkx/algorithms/connectivity/connectivity.py index cbb34152bba..06f035feb4d 100644 --- a/networkx/algorithms/connectivity/connectivity.py +++ b/networkx/algorithms/connectivity/connectivity.py @@ -31,7 +31,7 @@ ] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"auxiliary", "residual"}, @@ -214,7 +214,7 @@ def local_node_connectivity( return nx.maximum_flow_value(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) -@nx._dispatch +@nx._dispatchable def node_connectivity(G, s=None, t=None, flow_func=None): r"""Returns node connectivity for a graph or digraph G. @@ -355,7 +355,7 @@ def neighbors(v): return K -@nx._dispatch +@nx._dispatchable def average_node_connectivity(G, flow_func=None): r"""Returns the average connectivity of a graph G. @@ -424,7 +424,7 @@ def average_node_connectivity(G, flow_func=None): return num / den -@nx._dispatch +@nx._dispatchable def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): """Compute node connectivity between all pairs of nodes of G. @@ -492,7 +492,7 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): return all_pairs -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, @@ -658,7 +658,7 @@ def local_edge_connectivity( return nx.maximum_flow_value(H, s, t, **kwargs) -@nx._dispatch +@nx._dispatchable def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): r"""Returns the edge connectivity of the graph or digraph G. 
diff --git a/networkx/algorithms/connectivity/cuts.py b/networkx/algorithms/connectivity/cuts.py index d5883ba8f8e..e51c6843eb3 100644 --- a/networkx/algorithms/connectivity/cuts.py +++ b/networkx/algorithms/connectivity/cuts.py @@ -21,7 +21,7 @@ ] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={ "auxiliary": {"capacity": float("inf")}, @@ -161,7 +161,7 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): return cutset -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_node_attrs={"auxiliary": {"id": None}}, @@ -305,7 +305,7 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): return node_cut - {s, t} -@nx._dispatch +@nx._dispatchable def minimum_node_cut(G, s=None, t=None, flow_func=None): r"""Returns a set of nodes of minimum cardinality that disconnects G. @@ -451,7 +451,7 @@ def neighbors(v): return min_cut -@nx._dispatch +@nx._dispatchable def minimum_edge_cut(G, s=None, t=None, flow_func=None): r"""Returns a set of edges of minimum cardinality that disconnects G. 
diff --git a/networkx/algorithms/connectivity/disjoint_paths.py b/networkx/algorithms/connectivity/disjoint_paths.py index 3fe450517d5..b80aa9c7fb4 100644 --- a/networkx/algorithms/connectivity/disjoint_paths.py +++ b/networkx/algorithms/connectivity/disjoint_paths.py @@ -19,7 +19,7 @@ __all__ = ["edge_disjoint_paths", "node_disjoint_paths"] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 5, "residual?": 6}, preserve_edge_attrs={ "auxiliary": {"capacity": float("inf")}, @@ -234,7 +234,7 @@ def edge_disjoint_paths( paths_found += 1 -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 5, "residual?": 6}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_node_attrs={"auxiliary": {"id": None}}, diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py index c1215509e88..0e77a0bf683 100644 --- a/networkx/algorithms/connectivity/edge_augmentation.py +++ b/networkx/algorithms/connectivity/edge_augmentation.py @@ -24,7 +24,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_k_edge_connected(G, k): """Tests to see if a graph is k-edge-connected. @@ -75,7 +75,7 @@ def is_k_edge_connected(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_locally_k_edge_connected(G, s, t, k): """Tests to see if an edge in a graph is locally k-edge-connected. @@ -133,7 +133,7 @@ def is_locally_k_edge_connected(G, s, t, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): """Finds set of edges to k-edge-connect G. 
@@ -284,7 +284,7 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): raise -@nx._dispatch +@nx._dispatchable def partial_k_edge_augmentation(G, k, avail, weight=None): """Finds augmentation that k-edge-connects as much of the graph as possible. @@ -387,7 +387,7 @@ def _edges_between_disjoint(H, only1, only2): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def one_edge_augmentation(G, avail=None, weight=None, partial=False): """Finds minimum weight set of edges to connect G. @@ -442,7 +442,7 @@ def one_edge_augmentation(G, avail=None, weight=None, partial=False): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def bridge_augmentation(G, avail=None, weight=None): """Finds the a set of edges that bridge connects G. @@ -578,7 +578,7 @@ def _lightest_meta_edges(mapping, avail_uv, avail_w): yield MetaEdge((mu, mv), (u, v), w) -@nx._dispatch +@nx._dispatchable def unconstrained_one_edge_augmentation(G): """Finds the smallest set of edges to connect G. @@ -621,7 +621,7 @@ def unconstrained_one_edge_augmentation(G): yield (inverse[mu][0], inverse[mv][0]) -@nx._dispatch +@nx._dispatchable def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): """Finds the minimum weight set of edges to connect G if one exists. @@ -690,7 +690,7 @@ def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): yield edge -@nx._dispatch +@nx._dispatchable def unconstrained_bridge_augmentation(G): """Finds an optimal 2-edge-augmentation of G using the fewest edges. @@ -845,7 +845,7 @@ def unconstrained_bridge_augmentation(G): break -@nx._dispatch +@nx._dispatchable def weighted_bridge_augmentation(G, avail, weight=None): """Finds an approximate min-weight 2-edge-augmentation of G. 
@@ -1040,7 +1040,7 @@ def _minimum_rooted_branching(D, root): return A -@nx._dispatch +@nx._dispatchable def collapse(G, grouped_nodes): """Collapses each group of nodes into a single node. @@ -1112,7 +1112,7 @@ def collapse(G, grouped_nodes): return C -@nx._dispatch +@nx._dispatchable def complement_edges(G): """Returns only the edges in the complement of G @@ -1158,7 +1158,7 @@ def _compat_shuffle(rng, input): @not_implemented_for("multigraph") @not_implemented_for("directed") @py_random_state(4) -@nx._dispatch +@nx._dispatchable def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): """Greedy algorithm for finding a k-edge-augmentation diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py index e602c33aaee..d2ed1ac5c7d 100644 --- a/networkx/algorithms/connectivity/edge_kcomponents.py +++ b/networkx/algorithms/connectivity/edge_kcomponents.py @@ -23,7 +23,7 @@ @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def k_edge_components(G, k): """Generates nodes in each maximal k-edge-connected component in G. @@ -107,7 +107,7 @@ def k_edge_components(G, k): @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def k_edge_subgraphs(G, k): """Generates nodes in each maximal k-edge-connected subgraph in G. @@ -196,7 +196,7 @@ def _k_edge_subgraphs_nodes(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def bridge_components(G): """Finds all bridge-connected components G. @@ -503,7 +503,7 @@ def _high_degree_components(G, k): yield from nx.connected_components(H) -@nx._dispatch +@nx._dispatchable def general_k_edge_subgraphs(G, k): """General algorithm to find all maximal k-edge-connected subgraphs in G. 
diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py index 19a6e486b84..50d5c8f4190 100644 --- a/networkx/algorithms/connectivity/kcomponents.py +++ b/networkx/algorithms/connectivity/kcomponents.py @@ -17,7 +17,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def k_components(G, flow_func=None): r"""Returns the k-component structure of a graph G. diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py index f2036faf1e1..53f8d3b8f6e 100644 --- a/networkx/algorithms/connectivity/kcutsets.py +++ b/networkx/algorithms/connectivity/kcutsets.py @@ -21,7 +21,7 @@ __all__ = ["all_node_cuts"] -@nx._dispatch +@nx._dispatchable def all_node_cuts(G, k=None, flow_func=None): r"""Returns all minimum k cutsets of an undirected graph G. diff --git a/networkx/algorithms/connectivity/stoerwagner.py b/networkx/algorithms/connectivity/stoerwagner.py index dc95877e221..cd9d5acaf17 100644 --- a/networkx/algorithms/connectivity/stoerwagner.py +++ b/networkx/algorithms/connectivity/stoerwagner.py @@ -12,7 +12,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def stoer_wagner(G, weight="weight", heap=BinaryHeap): r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm. diff --git a/networkx/algorithms/connectivity/utils.py b/networkx/algorithms/connectivity/utils.py index bf6860a208f..cd3dadf61da 100644 --- a/networkx/algorithms/connectivity/utils.py +++ b/networkx/algorithms/connectivity/utils.py @@ -6,7 +6,7 @@ __all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"] -@nx._dispatch +@nx._dispatchable def build_auxiliary_node_connectivity(G): r"""Creates a directed graph D from an undirected graph G to compute flow based node connectivity. 
@@ -59,7 +59,7 @@ def build_auxiliary_node_connectivity(G): return H -@nx._dispatch +@nx._dispatchable def build_auxiliary_edge_connectivity(G): """Auxiliary digraph for computing flow based edge connectivity diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 87835c299d5..7ab7598260c 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -42,7 +42,7 @@ @nx.utils.not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def core_number(G): """Returns the core number for each node. @@ -148,7 +148,7 @@ def _core_subgraph(G, k_filter, k=None, core=None): return G.subgraph(nodes).copy() -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def k_core(G, k=None, core_number=None): """Returns the k-core of G. @@ -224,7 +224,7 @@ def k_filter(v, k, c): return _core_subgraph(G, k_filter, k, core_number) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def k_shell(G, k=None, core_number=None): """Returns the k-shell of G. @@ -306,7 +306,7 @@ def k_filter(v, k, c): return _core_subgraph(G, k_filter, k, core_number) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def k_crust(G, k=None, core_number=None): """Returns the k-crust of G. @@ -389,7 +389,7 @@ def k_crust(G, k=None, core_number=None): return G.subgraph(nodes).copy() -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def k_corona(G, k, core_number=None): """Returns the k-corona of G. @@ -468,7 +468,7 @@ def func(v, k, c): @nx.utils.not_implemented_for("directed") @nx.utils.not_implemented_for("multigraph") -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def k_truss(G, k): """Returns the k-truss of `G`. 
@@ -550,7 +550,7 @@ def k_truss(G, k): @nx.utils.not_implemented_for("multigraph") @nx.utils.not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def onion_layers(G): """Returns the layer of each vertex in an onion decomposition of the graph. diff --git a/networkx/algorithms/covering.py b/networkx/algorithms/covering.py index b31521f4297..bed482bc4b3 100644 --- a/networkx/algorithms/covering.py +++ b/networkx/algorithms/covering.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def min_edge_cover(G, matching_algorithm=None): """Returns the min cardinality edge cover of the graph as a set of edges. @@ -106,7 +106,7 @@ def min_edge_cover(G, matching_algorithm=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_edge_cover(G, cover): """Decides whether a set of edges is a valid edge cover of the graph. diff --git a/networkx/algorithms/cuts.py b/networkx/algorithms/cuts.py index ce455eb47c8..d7d54e7bb0d 100644 --- a/networkx/algorithms/cuts.py +++ b/networkx/algorithms/cuts.py @@ -21,7 +21,7 @@ # TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION! -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def cut_size(G, S, T=None, weight=None): """Returns the size of the cut between two sets of nodes. @@ -84,7 +84,7 @@ def cut_size(G, S, T=None, weight=None): return sum(weight for u, v, weight in edges) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def volume(G, S, weight=None): """Returns the volume of a set of nodes. @@ -127,7 +127,7 @@ def volume(G, S, weight=None): return sum(d for v, d in degree(S, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def normalized_cut_size(G, S, T=None, weight=None): """Returns the normalized size of the cut between two sets of nodes. 
@@ -180,7 +180,7 @@ def normalized_cut_size(G, S, T=None, weight=None): return num_cut_edges * ((1 / volume_S) + (1 / volume_T)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def conductance(G, S, T=None, weight=None): """Returns the conductance of two sets of nodes. @@ -228,7 +228,7 @@ def conductance(G, S, T=None, weight=None): return num_cut_edges / min(volume_S, volume_T) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_expansion(G, S, T=None, weight=None): """Returns the edge expansion between two node sets. @@ -275,7 +275,7 @@ def edge_expansion(G, S, T=None, weight=None): return num_cut_edges / min(len(S), len(T)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def mixing_expansion(G, S, T=None, weight=None): """Returns the mixing expansion between two node sets. @@ -323,7 +323,7 @@ def mixing_expansion(G, S, T=None, weight=None): # TODO What is the generalization to two arguments, S and T? Does the # denominator become `min(len(S), len(T))`? -@nx._dispatch +@nx._dispatchable def node_expansion(G, S): """Returns the node expansion of the set `S`. @@ -363,7 +363,7 @@ def node_expansion(G, S): # TODO What is the generalization to two arguments, S and T? Does the # denominator become `min(len(S), len(T))`? -@nx._dispatch +@nx._dispatchable def boundary_expansion(G, S): """Returns the boundary expansion of the set `S`. diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py index 5221ca15893..dd7d4513a71 100644 --- a/networkx/algorithms/cycles.py +++ b/networkx/algorithms/cycles.py @@ -24,7 +24,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def cycle_basis(G, root=None): """Returns a list of cycles which form a basis for cycles of G. 
@@ -101,7 +101,7 @@ def cycle_basis(G, root=None): return cycles -@nx._dispatch +@nx._dispatchable def simple_cycles(G, length_bound=None): """Find simple cycles (elementary circuits) of a graph. @@ -474,7 +474,7 @@ def _bounded_cycle_search(G, path, length_bound): B[w].add(v) -@nx._dispatch +@nx._dispatchable def chordless_cycles(G, length_bound=None): """Find simple chordless cycles of a graph. @@ -763,7 +763,7 @@ def _chordless_cycle_search(F, B, path, length_bound): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def recursive_simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. @@ -873,7 +873,7 @@ def circuit(thisnode, startnode, component): return result -@nx._dispatch +@nx._dispatchable def find_cycle(G, source=None, orientation=None): """Returns a cycle found via depth-first traversal. @@ -1035,7 +1035,7 @@ def tailhead(edge): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def minimum_cycle_basis(G, weight=None): """Returns a minimum weight cycle basis for G @@ -1165,7 +1165,7 @@ def _min_cycle(G, orth, weight): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def girth(G): """Returns the girth of the graph. diff --git a/networkx/algorithms/d_separation.py b/networkx/algorithms/d_separation.py index dc1e105ee3a..a688eca4081 100644 --- a/networkx/algorithms/d_separation.py +++ b/networkx/algorithms/d_separation.py @@ -229,7 +229,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_d_separator(G, x, y, z): """Return whether node sets `x` and `y` are d-separated by `z`. 
@@ -337,7 +337,7 @@ def is_d_separator(G, x, y, z): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def find_minimal_d_separator(G, x, y, *, included=None, restricted=None): """Returns a minimal d-separating set between `x` and `y` if possible @@ -444,7 +444,7 @@ def find_minimal_d_separator(G, x, y, *, included=None, restricted=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_minimal_d_separator(G, x, y, z, *, included=None, restricted=None): """Determine if `z` is a minimal d-separator for `x` and `y`. diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index eb3a064e4b2..70cdd8d968d 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -36,7 +36,7 @@ chaini = chain.from_iterable -@nx._dispatch +@nx._dispatchable def descendants(G, source): """Returns all nodes reachable from `source` in `G`. @@ -73,7 +73,7 @@ def descendants(G, source): return {child for parent, child in nx.bfs_edges(G, source)} -@nx._dispatch +@nx._dispatchable def ancestors(G, source): """Returns all nodes having a path to `source` in `G`. @@ -110,7 +110,7 @@ def ancestors(G, source): return {child for parent, child in nx.bfs_edges(G, source, reverse=True)} -@nx._dispatch +@nx._dispatchable def has_cycle(G): """Decides whether the directed graph has a cycle.""" try: @@ -122,7 +122,7 @@ def has_cycle(G): return False -@nx._dispatch +@nx._dispatchable def is_directed_acyclic_graph(G): """Returns True if the graph `G` is a directed acyclic graph (DAG) or False if not. @@ -163,7 +163,7 @@ def is_directed_acyclic_graph(G): return G.is_directed() and not has_cycle(G) -@nx._dispatch +@nx._dispatchable def topological_generations(G): """Stratifies a DAG into generations. @@ -241,7 +241,7 @@ def topological_generations(G): ) -@nx._dispatch +@nx._dispatchable def topological_sort(G): """Returns a generator of nodes in topologically sorted order. 
@@ -310,7 +310,7 @@ def topological_sort(G): yield from generation -@nx._dispatch +@nx._dispatchable def lexicographical_topological_sort(G, key=None): """Generate the nodes in the unique lexicographical topological sort order. @@ -453,7 +453,7 @@ def create_tuple(node): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_topological_sorts(G): """Returns a generator of _all_ topological sorts of the directed graph G. @@ -572,7 +572,7 @@ def all_topological_sorts(G): break -@nx._dispatch +@nx._dispatchable def is_aperiodic(G): """Returns True if `G` is aperiodic. @@ -665,7 +665,7 @@ def is_aperiodic(G): return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels))) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def transitive_closure(G, reflexive=False): """Returns transitive closure of a graph @@ -758,7 +758,7 @@ def transitive_closure(G, reflexive=False): @not_implemented_for("undirected") -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def transitive_closure_dag(G, topo_order=None): """Returns the transitive closure of a directed acyclic graph. @@ -815,7 +815,7 @@ def transitive_closure_dag(G, topo_order=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def transitive_reduction(G): """Returns transitive reduction of a directed graph @@ -888,7 +888,7 @@ def transitive_reduction(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def antichains(G, topo_order=None): """Generates antichains from a directed acyclic graph (DAG). @@ -955,7 +955,7 @@ def antichains(G, topo_order=None): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs={"weight": "default_weight"}) +@nx._dispatchable(edge_attrs={"weight": "default_weight"}) def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): """Returns the longest path in a directed acyclic graph (DAG). 
@@ -1051,7 +1051,7 @@ def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs={"weight": "default_weight"}) +@nx._dispatchable(edge_attrs={"weight": "default_weight"}) def dag_longest_path_length(G, weight="weight", default_weight=1): """Returns the longest path length in a DAG @@ -1103,7 +1103,7 @@ def dag_longest_path_length(G, weight="weight", default_weight=1): return path_length -@nx._dispatch +@nx._dispatchable def root_to_leaf_paths(G): """Yields root-to-leaf paths in a directed acyclic graph. @@ -1124,7 +1124,7 @@ def root_to_leaf_paths(G): @not_implemented_for("multigraph") @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def dag_to_branching(G): """Returns a branching representing all (overlapping) paths from root nodes to leaf nodes in the given directed acyclic graph. @@ -1222,7 +1222,7 @@ def dag_to_branching(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def compute_v_structures(G): """Iterate through the graph to compute all v-structures. diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 844e85f335b..423d9b6dee5 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -237,7 +237,7 @@ def _extrema_bounding(G, compute="diameter", weight=None): return None -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def eccentricity(G, v=None, sp=None, weight=None): """Returns the eccentricity of nodes in G. @@ -326,7 +326,7 @@ def eccentricity(G, v=None, sp=None, weight=None): return e -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def diameter(G, e=None, usebounds=False, weight=None): """Returns the diameter of the graph G. 
@@ -382,7 +382,7 @@ def diameter(G, e=None, usebounds=False, weight=None): return max(e.values()) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def periphery(G, e=None, usebounds=False, weight=None): """Returns the periphery of the graph G. @@ -441,7 +441,7 @@ def periphery(G, e=None, usebounds=False, weight=None): return p -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def radius(G, e=None, usebounds=False, weight=None): """Returns the radius of the graph G. @@ -494,7 +494,7 @@ def radius(G, e=None, usebounds=False, weight=None): return min(e.values()) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def center(G, e=None, usebounds=False, weight=None): """Returns the center of the graph G. @@ -553,7 +553,7 @@ def center(G, e=None, usebounds=False, weight=None): return p -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def barycenter(G, weight=None, attr=None, sp=None): r"""Calculate barycenter of a connected graph, optionally with edge weights. @@ -633,7 +633,7 @@ def barycenter(G, weight=None, attr=None, sp=None): @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True): """Returns the resistance distance between pairs of nodes in graph G. @@ -765,7 +765,7 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def effective_graph_resistance(G, weight=None, invert_weight=True): """Returns the Effective graph resistance of G. 
@@ -853,7 +853,7 @@ def effective_graph_resistance(G, weight=None, invert_weight=True): @nx.utils.not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def kemeny_constant(G, *, weight=None): """Returns the Kemeny constant of the given graph. diff --git a/networkx/algorithms/distance_regular.py b/networkx/algorithms/distance_regular.py index 936fd5a2216..27b4d0216e4 100644 --- a/networkx/algorithms/distance_regular.py +++ b/networkx/algorithms/distance_regular.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def is_distance_regular(G): """Returns True if the graph is distance regular, False otherwise. @@ -111,7 +111,7 @@ def global_parameters(b, c): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def intersection_array(G): """Returns the intersection array of a distance-regular graph. @@ -184,7 +184,7 @@ def intersection_array(G): # TODO There is a definition for directed strongly regular graphs. @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_strongly_regular(G): """Returns True if and only if the given graph is strongly regular. diff --git a/networkx/algorithms/dominance.py b/networkx/algorithms/dominance.py index ffdbe7d2139..ab841fe21d5 100644 --- a/networkx/algorithms/dominance.py +++ b/networkx/algorithms/dominance.py @@ -11,7 +11,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def immediate_dominators(G, start): """Returns the immediate dominators of all nodes of a directed graph. @@ -84,7 +84,7 @@ def intersect(u, v): return idom -@nx._dispatch +@nx._dispatchable def dominance_frontiers(G, start): """Returns the dominance frontiers of all nodes of a directed graph. 
diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py index 80276bb5a1d..8e9a458f8e3 100644 --- a/networkx/algorithms/dominating.py +++ b/networkx/algorithms/dominating.py @@ -7,7 +7,7 @@ __all__ = ["dominating_set", "is_dominating_set"] -@nx._dispatch +@nx._dispatchable def dominating_set(G, start_with=None): r"""Finds a dominating set for the graph G. @@ -65,7 +65,7 @@ def dominating_set(G, start_with=None): return dominating_set -@nx._dispatch +@nx._dispatchable def is_dominating_set(G, nbunch): """Checks if `nbunch` is a dominating set for `G`. diff --git a/networkx/algorithms/efficiency_measures.py b/networkx/algorithms/efficiency_measures.py index 3beea38b013..2c99b011431 100644 --- a/networkx/algorithms/efficiency_measures.py +++ b/networkx/algorithms/efficiency_measures.py @@ -9,7 +9,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def efficiency(G, u, v): """Returns the efficiency of a pair of nodes in a graph. @@ -60,7 +60,7 @@ def efficiency(G, u, v): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def global_efficiency(G): """Returns the average global efficiency of the graph. @@ -121,7 +121,7 @@ def global_efficiency(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def local_efficiency(G): """Returns the average local efficiency of the graph. diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py index 9d61b5e4130..54ab9bcff09 100644 --- a/networkx/algorithms/euler.py +++ b/networkx/algorithms/euler.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def is_eulerian(G): """Returns True if and only if `G` is Eulerian. @@ -69,7 +69,7 @@ def is_eulerian(G): return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G) -@nx._dispatch +@nx._dispatchable def is_semieulerian(G): """Return True iff `G` is semi-Eulerian. 
@@ -154,7 +154,7 @@ def _multigraph_eulerian_circuit(G, source): G.remove_edge(current_vertex, next_vertex, next_key) -@nx._dispatch +@nx._dispatchable def eulerian_circuit(G, source=None, keys=False): """Returns an iterator over the edges of an Eulerian circuit in `G`. @@ -235,7 +235,7 @@ def eulerian_circuit(G, source=None, keys=False): yield from _simplegraph_eulerian_circuit(G, source) -@nx._dispatch +@nx._dispatchable def has_eulerian_path(G, source=None): """Return True iff `G` has an Eulerian path. @@ -330,7 +330,7 @@ def has_eulerian_path(G, source=None): return sum(d % 2 == 1 for v, d in G.degree()) == 2 and nx.is_connected(G) -@nx._dispatch +@nx._dispatchable def eulerian_path(G, source=None, keys=False): """Return an iterator over the edges of an Eulerian path in `G`. @@ -386,7 +386,7 @@ def eulerian_path(G, source=None, keys=False): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def eulerize(G): """Transforms a graph into an Eulerian graph. diff --git a/networkx/algorithms/flow/boykovkolmogorov.py b/networkx/algorithms/flow/boykovkolmogorov.py index f571c98aebc..c75bbba69ca 100644 --- a/networkx/algorithms/flow/boykovkolmogorov.py +++ b/networkx/algorithms/flow/boykovkolmogorov.py @@ -10,7 +10,7 @@ __all__ = ["boykov_kolmogorov"] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, diff --git a/networkx/algorithms/flow/capacityscaling.py b/networkx/algorithms/flow/capacityscaling.py index 2c0002d86c1..bf68565c548 100644 --- a/networkx/algorithms/flow/capacityscaling.py +++ b/networkx/algorithms/flow/capacityscaling.py @@ -149,7 +149,9 @@ def _build_flow_dict(G, R, capacity, weight): return flow_dict -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def capacity_scaling( G, 
demand="demand", capacity="capacity", weight="weight", heap=BinaryHeap ): diff --git a/networkx/algorithms/flow/dinitz_alg.py b/networkx/algorithms/flow/dinitz_alg.py index 0348393d682..b40fd78351f 100644 --- a/networkx/algorithms/flow/dinitz_alg.py +++ b/networkx/algorithms/flow/dinitz_alg.py @@ -10,7 +10,7 @@ __all__ = ["dinitz"] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, diff --git a/networkx/algorithms/flow/edmondskarp.py b/networkx/algorithms/flow/edmondskarp.py index 7c8440a4519..6fa76d7cabc 100644 --- a/networkx/algorithms/flow/edmondskarp.py +++ b/networkx/algorithms/flow/edmondskarp.py @@ -8,7 +8,7 @@ __all__ = ["edmonds_karp"] -@nx._dispatch( +@nx._dispatchable( graphs="R", preserve_edge_attrs={"R": {"capacity": float("inf"), "flow": 0}}, preserve_graph_attrs=True, @@ -122,7 +122,7 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): return R -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, diff --git a/networkx/algorithms/flow/gomory_hu.py b/networkx/algorithms/flow/gomory_hu.py index 0be27d56494..802c9621d80 100644 --- a/networkx/algorithms/flow/gomory_hu.py +++ b/networkx/algorithms/flow/gomory_hu.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(edge_attrs={"capacity": float("inf")}) def gomory_hu_tree(G, capacity="capacity", flow_func=None): r"""Returns the Gomory-Hu tree of an undirected graph G. 
diff --git a/networkx/algorithms/flow/maxflow.py b/networkx/algorithms/flow/maxflow.py index 35f359ee3b0..f196fe31d46 100644 --- a/networkx/algorithms/flow/maxflow.py +++ b/networkx/algorithms/flow/maxflow.py @@ -16,7 +16,7 @@ __all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"] -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find a maximum single-commodity flow. @@ -163,7 +163,7 @@ def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): return (R.graph["flow_value"], flow_dict) -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find the value of maximum single-commodity flow. @@ -303,7 +303,7 @@ def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwa return R.graph["flow_value"] -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value and the node partition of a minimum (s, t)-cut. @@ -467,7 +467,7 @@ def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): return (R.graph["flow_value"], partition) -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value of a minimum (s, t)-cut. 
diff --git a/networkx/algorithms/flow/mincost.py b/networkx/algorithms/flow/mincost.py index 7a6c3351190..2f9390d7a1c 100644 --- a/networkx/algorithms/flow/mincost.py +++ b/networkx/algorithms/flow/mincost.py @@ -7,7 +7,9 @@ import networkx as nx -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight"): r"""Find the cost of a minimum cost flow satisfying all demands in digraph G. @@ -97,7 +99,9 @@ def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight") return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[0] -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): r"""Returns a minimum cost flow satisfying all demands in digraph G. @@ -188,7 +192,7 @@ def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1] -@nx._dispatch(edge_attrs={"weight": 0}) +@nx._dispatchable(edge_attrs={"weight": 0}) def cost_of_flow(G, flowDict, weight="weight"): """Compute the cost of the flow given by flowDict on graph G. @@ -248,7 +252,7 @@ def cost_of_flow(G, flowDict, weight="weight"): return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True))) -@nx._dispatch(edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable(edge_attrs={"capacity": float("inf"), "weight": 0}) def max_flow_min_cost(G, s, t, capacity="capacity", weight="weight"): """Returns a maximum (s, t)-flow of minimum cost. 
diff --git a/networkx/algorithms/flow/networksimplex.py b/networkx/algorithms/flow/networksimplex.py index 9fa3589a0c9..a9822d96880 100644 --- a/networkx/algorithms/flow/networksimplex.py +++ b/networkx/algorithms/flow/networksimplex.py @@ -326,7 +326,9 @@ def find_leaving_edge(self, Wn, We): @not_implemented_for("undirected") -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def network_simplex(G, demand="demand", capacity="capacity", weight="weight"): r"""Find a minimum cost flow satisfying all demands in digraph G. diff --git a/networkx/algorithms/flow/preflowpush.py b/networkx/algorithms/flow/preflowpush.py index 05b982ba1ff..50f6c8503a7 100644 --- a/networkx/algorithms/flow/preflowpush.py +++ b/networkx/algorithms/flow/preflowpush.py @@ -288,7 +288,7 @@ def global_relabel(from_sink): return R -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, diff --git a/networkx/algorithms/flow/shortestaugmentingpath.py b/networkx/algorithms/flow/shortestaugmentingpath.py index d06a88b7bc8..ba50f7acb0c 100644 --- a/networkx/algorithms/flow/shortestaugmentingpath.py +++ b/networkx/algorithms/flow/shortestaugmentingpath.py @@ -163,7 +163,7 @@ def relabel(u): return R -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, diff --git a/networkx/algorithms/flow/utils.py b/networkx/algorithms/flow/utils.py index 349be4b302a..20ee78e0ae6 100644 --- a/networkx/algorithms/flow/utils.py +++ b/networkx/algorithms/flow/utils.py @@ -72,7 +72,7 @@ def clear_work(self): self._work = 0 -@nx._dispatch(edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(edge_attrs={"capacity": float("inf")}) def build_residual_network(G, 
capacity): """Build a residual network and initialize a zero flow. @@ -154,7 +154,7 @@ def build_residual_network(G, capacity): return R -@nx._dispatch( +@nx._dispatchable( graphs="R", preserve_edge_attrs={"R": {"capacity": float("inf")}}, preserve_graph_attrs=True, @@ -176,7 +176,7 @@ def detect_unboundedness(R, s, t): q.append(v) -@nx._dispatch(graphs={"G": 0, "R": 1}, preserve_edge_attrs={"R": {"flow": None}}) +@nx._dispatchable(graphs={"G": 0, "R": 1}, preserve_edge_attrs={"R": {"flow": None}}) def build_flow_dict(G, R): """Build a flow dictionary from a residual network.""" flow_dict = {} diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py index 0b69b08480c..bbe80671e85 100644 --- a/networkx/algorithms/graph_hashing.py +++ b/networkx/algorithms/graph_hashing.py @@ -37,7 +37,7 @@ def _neighborhood_aggregate(G, node, node_labels, edge_attr=None): return node_labels[node] + "".join(sorted(label_list)) -@nx._dispatch(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def weisfeiler_lehman_graph_hash( G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 ): @@ -160,7 +160,7 @@ def weisfeiler_lehman_step(G, labels, edge_attr=None): return _hash_label(str(tuple(subgraph_hash_counts)), digest_size) -@nx._dispatch(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def weisfeiler_lehman_subgraph_hashes( G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 ): diff --git a/networkx/algorithms/graphical.py b/networkx/algorithms/graphical.py index cb1664427fd..b2ce6c33a9a 100644 --- a/networkx/algorithms/graphical.py +++ b/networkx/algorithms/graphical.py @@ -14,7 +14,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_graphical(sequence, method="eg"): """Returns True if sequence is a valid degree sequence. 
@@ -93,7 +93,7 @@ def _basic_graphical_tests(deg_sequence): return dmax, dmin, dsum, n, num_degs -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_valid_degree_sequence_havel_hakimi(deg_sequence): r"""Returns True if deg_sequence can be realized by a simple graph. @@ -183,7 +183,7 @@ def is_valid_degree_sequence_havel_hakimi(deg_sequence): return True -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_valid_degree_sequence_erdos_gallai(deg_sequence): r"""Returns True if deg_sequence can be realized by a simple graph. @@ -274,7 +274,7 @@ def is_valid_degree_sequence_erdos_gallai(deg_sequence): return True -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_multigraphical(sequence): """Returns True if some multigraph can realize the sequence. @@ -325,7 +325,7 @@ def is_multigraphical(sequence): return True -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_pseudographical(sequence): """Returns True if some pseudograph can realize the sequence. @@ -372,7 +372,7 @@ def is_pseudographical(sequence): return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0 -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_digraphical(in_sequence, out_sequence): r"""Returns True if some directed graph can realize the in- and out-degree sequences. diff --git a/networkx/algorithms/hierarchy.py b/networkx/algorithms/hierarchy.py index 6dc63a741b5..4bb01cb4568 100644 --- a/networkx/algorithms/hierarchy.py +++ b/networkx/algorithms/hierarchy.py @@ -6,7 +6,7 @@ __all__ = ["flow_hierarchy"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def flow_hierarchy(G, weight=None): """Returns the flow hierarchy of a directed network. 
diff --git a/networkx/algorithms/hybrid.py b/networkx/algorithms/hybrid.py index 347f5c2f199..c98774e002e 100644 --- a/networkx/algorithms/hybrid.py +++ b/networkx/algorithms/hybrid.py @@ -10,7 +10,7 @@ __all__ = ["kl_connected_subgraph", "is_kl_connected"] -@nx._dispatch +@nx._dispatchable def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): """Returns the maximum locally `(k, l)`-connected subgraph of `G`. @@ -115,7 +115,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): return H -@nx._dispatch +@nx._dispatchable def is_kl_connected(G, k, l, low_memory=False): """Returns True if and only if `G` is locally `(k, l)`-connected. diff --git a/networkx/algorithms/isolate.py b/networkx/algorithms/isolate.py index f9983282a63..23ac23875db 100644 --- a/networkx/algorithms/isolate.py +++ b/networkx/algorithms/isolate.py @@ -6,7 +6,7 @@ __all__ = ["is_isolate", "isolates", "number_of_isolates"] -@nx._dispatch +@nx._dispatchable def is_isolate(G, n): """Determines whether a node is an isolate. @@ -39,7 +39,7 @@ def is_isolate(G, n): return G.degree(n) == 0 -@nx._dispatch +@nx._dispatchable def isolates(G): """Iterator over isolates in the graph. @@ -85,7 +85,7 @@ def isolates(G): return (n for n, d in G.degree() if d == 0) -@nx._dispatch +@nx._dispatchable def number_of_isolates(G): """Returns the number of isolates in the graph. diff --git a/networkx/algorithms/isomorphism/isomorph.py b/networkx/algorithms/isomorphism/isomorph.py index 6f562400a3b..00395b71cc1 100644 --- a/networkx/algorithms/isomorphism/isomorph.py +++ b/networkx/algorithms/isomorphism/isomorph.py @@ -12,7 +12,7 @@ ] -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. 
@@ -60,7 +60,7 @@ def could_be_isomorphic(G1, G2): graph_could_be_isomorphic = could_be_isomorphic -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def fast_could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. @@ -101,7 +101,7 @@ def fast_could_be_isomorphic(G1, G2): fast_graph_could_be_isomorphic = fast_could_be_isomorphic -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def faster_could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. @@ -134,7 +134,7 @@ def faster_could_be_isomorphic(G1, G2): faster_graph_could_be_isomorphic = faster_could_be_isomorphic -@nx._dispatch( +@nx._dispatchable( graphs={"G1": 0, "G2": 1}, preserve_edge_attrs="edge_match", preserve_node_attrs="node_match", diff --git a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py index c0ef821c1e3..71a5890a116 100644 --- a/networkx/algorithms/isomorphism/tree_isomorphism.py +++ b/networkx/algorithms/isomorphism/tree_isomorphism.py @@ -24,7 +24,7 @@ __all__ = ["rooted_tree_isomorphism", "tree_isomorphism"] -@nx._dispatch(graphs={"t1": 0, "t2": 2}) +@nx._dispatchable(graphs={"t1": 0, "t2": 2}) def root_trees(t1, root1, t2, root2): """Create a single digraph dT of free trees t1 and t2 # with roots root1 and root2 respectively @@ -72,7 +72,7 @@ def root_trees(t1, root1, t2, root2): # figure out the level of each node, with 0 at root -@nx._dispatch +@nx._dispatchable def assign_levels(G, root): level = {} level[root] = 0 @@ -102,7 +102,7 @@ def generate_isomorphism(v, w, M, ordered_children): generate_isomorphism(x, y, M, ordered_children) -@nx._dispatch(graphs={"t1": 0, "t2": 2}) +@nx._dispatchable(graphs={"t1": 0, "t2": 2}) def rooted_tree_isomorphism(t1, root1, t2, root2): """ Given two rooted trees `t1` and `t2`, @@ -211,7 +211,7 @@ def rooted_tree_isomorphism(t1, root1, t2, root2): 
@not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(graphs={"t1": 0, "t2": 1}) +@nx._dispatchable(graphs={"t1": 0, "t2": 1}) def tree_isomorphism(t1, t2): """ Given two undirected (or free) trees `t1` and `t2`, diff --git a/networkx/algorithms/isomorphism/vf2pp.py b/networkx/algorithms/isomorphism/vf2pp.py index 8cc83eb8a53..589e06447cc 100644 --- a/networkx/algorithms/isomorphism/vf2pp.py +++ b/networkx/algorithms/isomorphism/vf2pp.py @@ -97,7 +97,7 @@ ) -@nx._dispatch(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None): """Return an isomorphic mapping between `G1` and `G2` if it exists. @@ -128,7 +128,7 @@ def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None): return None -@nx._dispatch(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None): """Examines whether G1 and G2 are isomorphic. @@ -157,7 +157,7 @@ def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None): return False -@nx._dispatch(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None): """Yields all the possible mappings between G1 and G2. 
diff --git a/networkx/algorithms/link_analysis/hits_alg.py b/networkx/algorithms/link_analysis/hits_alg.py index 8723c7d4932..e7b5141aa53 100644 --- a/networkx/algorithms/link_analysis/hits_alg.py +++ b/networkx/algorithms/link_analysis/hits_alg.py @@ -5,7 +5,7 @@ __all__ = ["hits"] -@nx._dispatch(preserve_edge_attrs={"G": {"weight": 1}}) +@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}}) def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): """Returns HITS hubs and authorities values for nodes. diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py index 371dd60edd9..2a4af6e5f12 100644 --- a/networkx/algorithms/link_analysis/pagerank_alg.py +++ b/networkx/algorithms/link_analysis/pagerank_alg.py @@ -6,7 +6,7 @@ __all__ = ["pagerank", "google_matrix"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def pagerank( G, alpha=0.85, @@ -172,7 +172,7 @@ def _pagerank_python( raise nx.PowerIterationFailedConvergence(max_iter) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def google_matrix( G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None ): diff --git a/networkx/algorithms/link_analysis/tests/test_pagerank.py b/networkx/algorithms/link_analysis/tests/test_pagerank.py index 6a30f0cd12c..db0f8c8d5bb 100644 --- a/networkx/algorithms/link_analysis/tests/test_pagerank.py +++ b/networkx/algorithms/link_analysis/tests/test_pagerank.py @@ -83,7 +83,7 @@ def test_numpy_pagerank(self): for n in G: assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) - # This additionally tests the @nx._dispatch mechanism, treating + # This additionally tests the @nx._dispatchable mechanism, treating # nx.google_matrix as if it were a re-implementation from another package @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert]) def test_google_matrix(self, wrapper): diff --git 
a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py index 21e89fbf160..56f7bc5c1ba 100644 --- a/networkx/algorithms/link_prediction.py +++ b/networkx/algorithms/link_prediction.py @@ -48,7 +48,7 @@ def _apply_prediction(G, func, ebunch=None): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def resource_allocation_index(G, ebunch=None): r"""Compute the resource allocation index of all node pairs in ebunch. @@ -111,7 +111,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def jaccard_coefficient(G, ebunch=None): r"""Compute the Jaccard coefficient of all node pairs in ebunch. @@ -176,7 +176,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def adamic_adar_index(G, ebunch=None): r"""Compute the Adamic-Adar index of all node pairs in ebunch. @@ -240,7 +240,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def common_neighbor_centrality(G, ebunch=None, alpha=0.8): r"""Return the CCPA score for each pair of nodes. @@ -349,7 +349,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def preferential_attachment(G, ebunch=None): r"""Compute the preferential attachment score of all node pairs in ebunch. @@ -411,7 +411,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs="community") +@nx._dispatchable(node_attrs="community") def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Count the number of common neighbors of all node pairs in ebunch using community information. 
@@ -497,7 +497,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs="community") +@nx._dispatchable(node_attrs="community") def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Compute the resource allocation index of all node pairs in ebunch using community information. @@ -584,7 +584,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs="community") +@nx._dispatchable(node_attrs="community") def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): """Compute the ratio of within- and inter-cluster common neighbors of all node pairs in ebunch. diff --git a/networkx/algorithms/lowest_common_ancestors.py b/networkx/algorithms/lowest_common_ancestors.py index 3cad18bff39..f695ec208d3 100644 --- a/networkx/algorithms/lowest_common_ancestors.py +++ b/networkx/algorithms/lowest_common_ancestors.py @@ -14,7 +14,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_pairs_lowest_common_ancestor(G, pairs=None): """Return the lowest common ancestor of all pairs or the provided pairs @@ -112,7 +112,7 @@ def generate_lca_from_pairs(G, pairs): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def lowest_common_ancestor(G, node1, node2, default=None): """Compute the lowest common ancestor of the given pair of nodes. @@ -150,7 +150,7 @@ def lowest_common_ancestor(G, node1, node2, default=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): r"""Yield the lowest common ancestor for sets of pairs in a tree. 
diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py index f44dc9e67cd..f346c2e052f 100644 --- a/networkx/algorithms/matching.py +++ b/networkx/algorithms/matching.py @@ -17,7 +17,7 @@ @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def maximal_matching(G): r"""Find a maximal matching in the graph. @@ -82,7 +82,7 @@ def matching_dict_to_set(matching): return edges -@nx._dispatch +@nx._dispatchable def is_matching(G, matching): """Return True if ``matching`` is a valid matching of ``G`` @@ -143,7 +143,7 @@ def is_matching(G, matching): return True -@nx._dispatch +@nx._dispatchable def is_maximal_matching(G, matching): """Return True if ``matching`` is a maximal matching of ``G`` @@ -205,7 +205,7 @@ def is_maximal_matching(G, matching): return True -@nx._dispatch +@nx._dispatchable def is_perfect_matching(G, matching): """Return True if ``matching`` is a perfect matching for ``G`` @@ -259,7 +259,7 @@ def is_perfect_matching(G, matching): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def min_weight_matching(G, weight="weight"): """Computing a minimum-weight maximal matching of G. @@ -320,7 +320,7 @@ def min_weight_matching(G, weight="weight"): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def max_weight_matching(G, maxcardinality=False, weight="weight"): """Compute a maximum-weighted matching of G. 
diff --git a/networkx/algorithms/minors/contraction.py b/networkx/algorithms/minors/contraction.py index 1b4da352296..08d1b3497e6 100644 --- a/networkx/algorithms/minors/contraction.py +++ b/networkx/algorithms/minors/contraction.py @@ -94,7 +94,7 @@ def equivalence_classes(iterable, relation): return {frozenset(block) for block in blocks} -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def quotient_graph( G, partition, @@ -425,7 +425,7 @@ def edge_data(b, c): return H -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def contracted_nodes(G, u, v, self_loops=True, copy=True): """Returns the graph that results from contracting `u` and `v`. @@ -560,7 +560,7 @@ def contracted_nodes(G, u, v, self_loops=True, copy=True): identified_nodes = contracted_nodes -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def contracted_edge(G, edge, self_loops=True, copy=True): """Returns the graph that results from contracting the specified edge. diff --git a/networkx/algorithms/mis.py b/networkx/algorithms/mis.py index 00d101c5288..fc70514d9ea 100644 --- a/networkx/algorithms/mis.py +++ b/networkx/algorithms/mis.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @py_random_state(2) -@nx._dispatch +@nx._dispatchable def maximal_independent_set(G, nodes=None, seed=None): """Returns a random maximal independent set guaranteed to contain a given set of nodes. 
diff --git a/networkx/algorithms/moral.py b/networkx/algorithms/moral.py index af187259251..8532f2b261f 100644 --- a/networkx/algorithms/moral.py +++ b/networkx/algorithms/moral.py @@ -9,7 +9,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def moral_graph(G): r"""Return the Moral Graph diff --git a/networkx/algorithms/node_classification.py b/networkx/algorithms/node_classification.py index c1b46545abb..42e7e6ba2ff 100644 --- a/networkx/algorithms/node_classification.py +++ b/networkx/algorithms/node_classification.py @@ -28,7 +28,7 @@ @nx.utils.not_implemented_for("directed") -@nx._dispatch(node_attrs="label_name") +@nx._dispatchable(node_attrs="label_name") def harmonic_function(G, max_iter=30, label_name="label"): """Node classification by Harmonic function @@ -105,7 +105,7 @@ def harmonic_function(G, max_iter=30, label_name="label"): @nx.utils.not_implemented_for("directed") -@nx._dispatch(node_attrs="label_name") +@nx._dispatchable(node_attrs="label_name") def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"): """Node classification by Local and Global Consistency diff --git a/networkx/algorithms/non_randomness.py b/networkx/algorithms/non_randomness.py index 777cecbc969..88e8fd08e90 100644 --- a/networkx/algorithms/non_randomness.py +++ b/networkx/algorithms/non_randomness.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def non_randomness(G, k=None, weight="weight"): """Compute the non-randomness of graph G. 
diff --git a/networkx/algorithms/operators/all.py b/networkx/algorithms/operators/all.py index 1a9317b168b..4e0c2173b9c 100644 --- a/networkx/algorithms/operators/all.py +++ b/networkx/algorithms/operators/all.py @@ -7,7 +7,7 @@ __all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"] -@nx._dispatch(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True) def union_all(graphs, rename=()): """Returns the union of all graphs. @@ -110,7 +110,7 @@ def label(x): return R -@nx._dispatch(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True) def disjoint_union_all(graphs): """Returns the disjoint union of all graphs. @@ -164,7 +164,7 @@ def yield_relabeled(graphs): return R -@nx._dispatch(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True) def compose_all(graphs): """Returns the composition of all graphs. @@ -230,7 +230,7 @@ def compose_all(graphs): return R -@nx._dispatch(graphs="[graphs]") +@nx._dispatchable(graphs="[graphs]") def intersection_all(graphs): """Returns a new graph that contains only the nodes and the edges that exist in all graphs. diff --git a/networkx/algorithms/operators/binary.py b/networkx/algorithms/operators/binary.py index c36ea26c25c..9a9b5891e9a 100644 --- a/networkx/algorithms/operators/binary.py +++ b/networkx/algorithms/operators/binary.py @@ -15,7 +15,7 @@ _G_H = {"G": 0, "H": 1} -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) def union(G, H, rename=()): """Combine graphs G and H. The names of nodes must be unique. @@ -71,7 +71,7 @@ def union(G, H, rename=()): return nx.union_all([G, H], rename) -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) def disjoint_union(G, H): """Combine graphs G and H. The nodes are assumed to be unique (disjoint). 
@@ -125,7 +125,7 @@ def disjoint_union(G, H): return nx.disjoint_union_all([G, H]) -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H) def intersection(G, H): """Returns a new graph that contains only the nodes and the edges that exist in both G and H. @@ -170,7 +170,7 @@ def intersection(G, H): return nx.intersection_all([G, H]) -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H) def difference(G, H): """Returns a new graph that contains the edges that exist in G but not in H. @@ -225,7 +225,7 @@ def difference(G, H): return R -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H) def symmetric_difference(G, H): """Returns new graph with edges that exist in either G or H but not both. @@ -288,7 +288,7 @@ def symmetric_difference(G, H): return R -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) def compose(G, H): """Compose graph G with H by combining nodes and edges into a single graph. @@ -365,7 +365,7 @@ def compose(G, H): return nx.compose_all([G, H]) -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True) def full_join(G, H, rename=(None, None)): """Returns the full join of graphs G and H. diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py index e7da4fc73be..9a8063c91e2 100644 --- a/networkx/algorithms/operators/product.py +++ b/networkx/algorithms/operators/product.py @@ -123,7 +123,7 @@ def _init_product_graph(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) def tensor_product(G, H): r"""Returns the tensor product of G and H. @@ -179,7 +179,7 @@ def tensor_product(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) def cartesian_product(G, H): r"""Returns the Cartesian product of G and H. 
@@ -231,7 +231,7 @@ def cartesian_product(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) def lexicographic_product(G, H): r"""Returns the lexicographic product of G and H. @@ -284,7 +284,7 @@ def lexicographic_product(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True) def strong_product(G, H): r"""Returns the strong product of G and H. @@ -342,7 +342,7 @@ def strong_product(G, H): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def power(G, k): """Returns the specified power of a graph. @@ -431,7 +431,7 @@ def power(G, k): @not_implemented_for("multigraph") -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H) def rooted_product(G, H, root): """Return the rooted product of graphs G and H rooted at root in H. @@ -471,7 +471,7 @@ def rooted_product(G, H, root): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H) def corona_product(G, H): r"""Returns the Corona product of G and H. diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py index 4ce6f9bf033..9693e6332f5 100644 --- a/networkx/algorithms/operators/tests/test_binary.py +++ b/networkx/algorithms/operators/tests/test_binary.py @@ -43,7 +43,7 @@ def test_intersection(): assert sorted(I.edges()) == [(2, 3)] ################## - # Tests for @nx._dispatch mechanism with multiple graph arguments + # Tests for @nx._dispatchable mechanism with multiple graph arguments # nx.intersection is called as if it were a re-implementation # from another package. 
################### @@ -53,7 +53,7 @@ def test_intersection(): assert set(I2.nodes()) == {1, 2, 3, 4} assert sorted(I2.edges()) == [(2, 3)] # Only test if not performing auto convert testing of backend implementations - if not nx.utils.backends._dispatch._automatic_backends: + if not nx.utils.backends._dispatchable._automatic_backends: with pytest.raises(TypeError): nx.intersection(G2, H) with pytest.raises(TypeError): diff --git a/networkx/algorithms/operators/unary.py b/networkx/algorithms/operators/unary.py index ce6d9be9057..42309ee9ff0 100644 --- a/networkx/algorithms/operators/unary.py +++ b/networkx/algorithms/operators/unary.py @@ -4,7 +4,7 @@ __all__ = ["complement", "reverse"] -@nx._dispatch +@nx._dispatchable def complement(G): """Returns the graph complement of G. @@ -40,7 +40,7 @@ def complement(G): return R -@nx._dispatch +@nx._dispatchable def reverse(G, copy=True): """Returns the reverse directed graph of G. diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index 59206b412e0..db494481cd1 100644 --- a/networkx/algorithms/planarity.py +++ b/networkx/algorithms/planarity.py @@ -5,7 +5,7 @@ __all__ = ["check_planarity", "is_planar", "PlanarEmbedding"] -@nx._dispatch +@nx._dispatchable def is_planar(G): """Returns True if and only if `G` is planar. @@ -38,7 +38,7 @@ def is_planar(G): return check_planarity(G, counterexample=False)[0] -@nx._dispatch +@nx._dispatchable def check_planarity(G, counterexample=False): """Check if a graph is planar and return a counterexample or an embedding. 
@@ -114,7 +114,7 @@ def check_planarity(G, counterexample=False): return True, embedding -@nx._dispatch +@nx._dispatchable def check_planarity_recursive(G, counterexample=False): """Recursive version of :meth:`check_planarity`.""" planarity_state = LRPlanarity(G) @@ -130,7 +130,7 @@ def check_planarity_recursive(G, counterexample=False): return True, embedding -@nx._dispatch +@nx._dispatchable def get_counterexample(G): """Obtains a Kuratowski subgraph. @@ -169,7 +169,7 @@ def get_counterexample(G): return subgraph -@nx._dispatch +@nx._dispatchable def get_counterexample_recursive(G): """Recursive version of :meth:`get_counterexample`.""" diff --git a/networkx/algorithms/polynomials.py b/networkx/algorithms/polynomials.py index 57ecf0d09a9..217c7dbe37b 100644 --- a/networkx/algorithms/polynomials.py +++ b/networkx/algorithms/polynomials.py @@ -30,7 +30,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def tutte_polynomial(G): r"""Returns the Tutte polynomial of `G` @@ -180,7 +180,7 @@ def tutte_polynomial(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def chromatic_polynomial(G): r"""Returns the chromatic polynomial of `G` diff --git a/networkx/algorithms/reciprocity.py b/networkx/algorithms/reciprocity.py index cb36ae9d551..25b0fa1ba9c 100644 --- a/networkx/algorithms/reciprocity.py +++ b/networkx/algorithms/reciprocity.py @@ -8,7 +8,7 @@ @not_implemented_for("undirected", "multigraph") -@nx._dispatch +@nx._dispatchable def reciprocity(G, nodes=None): r"""Compute the reciprocity in a directed graph. @@ -76,7 +76,7 @@ def _reciprocity_iter(G, nodes): @not_implemented_for("undirected", "multigraph") -@nx._dispatch +@nx._dispatchable def overall_reciprocity(G): """Compute the reciprocity for the whole graph. 
diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py index 52b5ed6ee81..01027cd8677 100644 --- a/networkx/algorithms/regular.py +++ b/networkx/algorithms/regular.py @@ -5,7 +5,7 @@ __all__ = ["is_regular", "is_k_regular", "k_factor"] -@nx._dispatch +@nx._dispatchable def is_regular(G): """Determines whether the graph ``G`` is a regular graph. @@ -44,7 +44,7 @@ def is_regular(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_k_regular(G, k): """Determines whether the graph ``G`` is a k-regular graph. @@ -71,7 +71,7 @@ def is_k_regular(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def k_factor(G, k, matching_weight="weight"): """Compute a k-factor of G diff --git a/networkx/algorithms/richclub.py b/networkx/algorithms/richclub.py index 6fe300fb538..f787f4254bf 100644 --- a/networkx/algorithms/richclub.py +++ b/networkx/algorithms/richclub.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def rich_club_coefficient(G, normalized=True, Q=100, seed=None): r"""Returns the rich-club coefficient of the graph `G`. diff --git a/networkx/algorithms/shortest_paths/astar.py b/networkx/algorithms/shortest_paths/astar.py index 64696353e40..1a36561fe80 100644 --- a/networkx/algorithms/shortest_paths/astar.py +++ b/networkx/algorithms/shortest_paths/astar.py @@ -9,7 +9,7 @@ __all__ = ["astar_path", "astar_path_length"] -@nx._dispatch(edge_attrs="weight", preserve_node_attrs="heuristic") +@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic") def astar_path(G, source, target, heuristic=None, weight="weight", *, cutoff=None): """Returns a list of nodes in a shortest path between source and target using the A* ("A-star") algorithm. 
@@ -171,7 +171,7 @@ def heuristic(u, v): raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") -@nx._dispatch(edge_attrs="weight", preserve_node_attrs="heuristic") +@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic") def astar_path_length( G, source, target, heuristic=None, weight="weight", *, cutoff=None ): diff --git a/networkx/algorithms/shortest_paths/dense.py b/networkx/algorithms/shortest_paths/dense.py index 08339b189dd..45fcac3249f 100644 --- a/networkx/algorithms/shortest_paths/dense.py +++ b/networkx/algorithms/shortest_paths/dense.py @@ -10,7 +10,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def floyd_warshall_numpy(G, nodelist=None, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. @@ -84,7 +84,7 @@ def floyd_warshall_numpy(G, nodelist=None, weight="weight"): return A -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def floyd_warshall_predecessor_and_distance(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. @@ -167,7 +167,7 @@ def floyd_warshall_predecessor_and_distance(G, weight="weight"): return dict(pred), dict(dist) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def reconstruct_path(source, target, predecessors): """Reconstruct a path from source to target using the predecessors dict as returned by floyd_warshall_predecessor_and_distance @@ -211,7 +211,7 @@ def reconstruct_path(source, target, predecessors): return list(reversed(path)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def floyd_warshall(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. 
diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index 1ed47aa7144..6d8d8c823e5 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -19,7 +19,7 @@ ] -@nx._dispatch +@nx._dispatchable def has_path(G, source, target): """Returns *True* if *G* has a path from *source* to *target*. @@ -40,7 +40,7 @@ def has_path(G, source, target): return True -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest paths in the graph. @@ -187,7 +187,7 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): return paths -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest path lengths in the graph. @@ -331,7 +331,7 @@ def shortest_path_length(G, source=None, target=None, weight=None, method="dijks return paths -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_shortest_path_length(G, weight=None, method=None): r"""Returns the average shortest path length. @@ -448,7 +448,7 @@ def path_length(v): return s / (n * (n - 1)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_shortest_paths(G, source, target, weight=None, method="dijkstra"): """Compute all shortest simple paths in the graph. @@ -526,7 +526,7 @@ def all_shortest_paths(G, source, target, weight=None, method="dijkstra"): return _build_paths_from_predecessors({source}, target, pred) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_all_shortest_paths(G, source, weight=None, method="dijkstra"): """Compute all shortest simple paths from the given source in the graph. 
@@ -602,7 +602,7 @@ def single_source_all_shortest_paths(G, source, weight=None, method="dijkstra"): pass -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_all_shortest_paths(G, weight=None, method="dijkstra"): """Compute all shortest paths between all nodes. diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py index bc329c55263..c7eea6fc625 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def single_source_shortest_path_length(G, source, cutoff=None): """Compute the shortest path lengths from source to all reachable nodes. @@ -95,7 +95,7 @@ def _single_shortest_path_length(adj, firstlevel, cutoff): return -@nx._dispatch +@nx._dispatchable def single_target_shortest_path_length(G, target, cutoff=None): """Compute the shortest path lengths to target from all reachable nodes. @@ -154,7 +154,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): return dict(_single_shortest_path_length(adj, nextlevel, cutoff)) -@nx._dispatch +@nx._dispatchable def all_pairs_shortest_path_length(G, cutoff=None): """Computes the shortest path lengths between all nodes in `G`. @@ -199,7 +199,7 @@ def all_pairs_shortest_path_length(G, cutoff=None): yield (n, length(G, n, cutoff=cutoff)) -@nx._dispatch +@nx._dispatchable def bidirectional_shortest_path(G, source, target): """Returns a list of nodes in a shortest path between source and target. @@ -315,7 +315,7 @@ def _bidirectional_pred_succ(G, source, target): raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -@nx._dispatch +@nx._dispatchable def single_source_shortest_path(G, source, cutoff=None): """Compute shortest path between source and all other nodes reachable from source. 
@@ -399,7 +399,7 @@ def _single_shortest_path(adj, firstlevel, paths, cutoff, join): return paths -@nx._dispatch +@nx._dispatchable def single_target_shortest_path(G, target, cutoff=None): """Compute shortest path to target from all nodes that reach target. @@ -451,7 +451,7 @@ def join(p1, p2): return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join)) -@nx._dispatch +@nx._dispatchable def all_pairs_shortest_path(G, cutoff=None): """Compute shortest paths between all nodes. @@ -491,7 +491,7 @@ def all_pairs_shortest_path(G, cutoff=None): yield (n, single_source_shortest_path(G, n, cutoff=cutoff)) -@nx._dispatch +@nx._dispatchable def predecessor(G, source, target=None, cutoff=None, return_seen=None): """Returns dict of predecessors for the path from source to all nodes in G. diff --git a/networkx/algorithms/shortest_paths/weighted.py b/networkx/algorithms/shortest_paths/weighted.py index bbbb03d9eee..e013fe24f83 100644 --- a/networkx/algorithms/shortest_paths/weighted.py +++ b/networkx/algorithms/shortest_paths/weighted.py @@ -78,7 +78,7 @@ def _weight_function(G, weight): return lambda u, v, data: data.get(weight, 1) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def dijkstra_path(G, source, target, weight="weight"): """Returns the shortest weighted path from source to target in G. @@ -169,7 +169,7 @@ def dijkstra_path(G, source, target, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def dijkstra_path_length(G, source, target, weight="weight"): """Returns the shortest weighted path length in G from source to target. 
@@ -249,7 +249,7 @@ def dijkstra_path_length(G, source, target, weight="weight"): raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") from err -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a source node. @@ -314,7 +314,7 @@ def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"): return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a source node. @@ -386,7 +386,7 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"): return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, weight=weight) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a source node. @@ -488,7 +488,7 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight") ) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a given set of source nodes. @@ -562,7 +562,7 @@ def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a given set of source nodes. 
@@ -644,7 +644,7 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): return _dijkstra_multisource(G, sources, weight, cutoff=cutoff) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a given set of source nodes. @@ -881,7 +881,7 @@ def _dijkstra_multisource( return dist -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): """Compute weighted shortest path length and predecessors. @@ -954,7 +954,7 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): return (pred, _dijkstra(G, source, weight, pred=pred, cutoff=cutoff)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_dijkstra(G, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths between all nodes. @@ -1023,7 +1023,7 @@ def all_pairs_dijkstra(G, cutoff=None, weight="weight"): yield (n, (dist, path)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"): """Compute shortest path lengths between all nodes in a weighted graph. @@ -1082,7 +1082,7 @@ def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"): yield (n, length(G, n, cutoff=cutoff, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): """Compute shortest paths between all nodes in a weighted graph. 
@@ -1136,7 +1136,7 @@ def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): yield (n, path(G, n, cutoff=cutoff, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bellman_ford_predecessor_and_distance( G, source, target=None, weight="weight", heuristic=False ): @@ -1484,7 +1484,7 @@ def _inner_bellman_ford( return None -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bellman_ford_path(G, source, target, weight="weight"): """Returns the shortest path from source to target in a weighted graph G. @@ -1543,7 +1543,7 @@ def bellman_ford_path(G, source, target, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bellman_ford_path_length(G, source, target, weight="weight"): """Returns the shortest path length from source to target in a weighted graph. @@ -1614,7 +1614,7 @@ def bellman_ford_path_length(G, source, target, weight="weight"): raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from err -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_bellman_ford_path(G, source, weight="weight"): """Compute shortest path between source and all other reachable nodes for a weighted graph. @@ -1670,7 +1670,7 @@ def single_source_bellman_ford_path(G, source, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_bellman_ford_path_length(G, source, weight="weight"): """Compute the shortest path length between source and all other reachable nodes for a weighted graph. @@ -1733,7 +1733,7 @@ def single_source_bellman_ford_path_length(G, source, weight="weight"): return _bellman_ford(G, [source], weight) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_bellman_ford(G, source, target=None, weight="weight"): """Compute shortest paths and lengths in a weighted graph G. 
@@ -1827,7 +1827,7 @@ def single_source_bellman_ford(G, source, target=None, weight="weight"): raise nx.NetworkXNoPath(msg) from err -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_bellman_ford_path_length(G, weight="weight"): """Compute shortest path lengths between all nodes in a weighted graph. @@ -1882,7 +1882,7 @@ def all_pairs_bellman_ford_path_length(G, weight="weight"): yield (n, dict(length(G, n, weight=weight))) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_bellman_ford_path(G, weight="weight"): """Compute shortest paths between all nodes in a weighted graph. @@ -1932,7 +1932,7 @@ def all_pairs_bellman_ford_path(G, weight="weight"): yield (n, path(G, n, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def goldberg_radzik(G, source, weight="weight"): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. @@ -2119,7 +2119,7 @@ def relax(to_scan): return pred, d -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def negative_edge_cycle(G, weight="weight", heuristic=True): """Returns True if there exists a negative edge cycle anywhere in G. @@ -2190,7 +2190,7 @@ def negative_edge_cycle(G, weight="weight", heuristic=True): return False -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def find_negative_cycle(G, source, weight="weight"): """Returns a cycle with negative total weight if it exists. @@ -2283,7 +2283,7 @@ def find_negative_cycle(G, source, weight="weight"): raise nx.NetworkXUnbounded(msg) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bidirectional_dijkstra(G, source, target, weight="weight"): r"""Dijkstra's algorithm for shortest paths using bidirectional search. 
@@ -2431,7 +2431,7 @@ def bidirectional_dijkstra(G, source, target, weight="weight"): raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def johnson(G, weight="weight"): r"""Uses Johnson's Algorithm to compute shortest paths. diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 183b41f66b6..e36d23dcb83 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -36,7 +36,7 @@ def debug_print(*args, **kwargs): print(*args, **kwargs) -@nx._dispatch( +@nx._dispatchable( graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True ) def graph_edit_distance( @@ -210,7 +210,7 @@ def graph_edit_distance( return bestcost -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def optimal_edit_paths( G1, G2, @@ -382,7 +382,7 @@ def optimal_edit_paths( return paths, bestcost -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def optimize_graph_edit_distance( G1, G2, @@ -533,7 +533,7 @@ def optimize_graph_edit_distance( yield cost -@nx._dispatch( +@nx._dispatchable( graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True ) def optimize_edit_paths( @@ -1212,7 +1212,7 @@ def prune(cost): yield list(vertex_path), list(edge_path), cost -@nx._dispatch +@nx._dispatchable def simrank_similarity( G, source=None, @@ -1525,7 +1525,7 @@ def _simrank_similarity_numpy( return newsim -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def panther_similarity( G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None, weight="weight" ): @@ -1661,7 +1661,7 @@ def panther_similarity( return top_k_with_val -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def generate_random_paths( G, sample_size, path_length=5, index_map=None, weight="weight" ): diff --git 
a/networkx/algorithms/simple_paths.py b/networkx/algorithms/simple_paths.py index 6324ad67e2d..be0890339e7 100644 --- a/networkx/algorithms/simple_paths.py +++ b/networkx/algorithms/simple_paths.py @@ -13,7 +13,7 @@ ] -@nx._dispatch +@nx._dispatchable def is_simple_path(G, nodes): """Returns True if and only if `nodes` form a simple path in `G`. @@ -91,7 +91,7 @@ def is_simple_path(G, nodes): return all(v in G[u] for u, v in pairwise(nodes)) -@nx._dispatch +@nx._dispatchable def all_simple_paths(G, source, target, cutoff=None): """Generate all simple paths in the graph G from source to target. @@ -257,7 +257,7 @@ def all_simple_paths(G, source, target, cutoff=None): yield [source] + [edge[1] for edge in edge_path] -@nx._dispatch +@nx._dispatchable def all_simple_edge_paths(G, source, target, cutoff=None): """Generate lists of edges for all simple paths in G from source to target. @@ -402,7 +402,7 @@ def _all_simple_edge_paths(G, source, targets, cutoff): @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def shortest_simple_paths(G, source, target, weight=None): """Generate all simple paths in the graph G from source to target, starting from shortest ones. diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py index 172c4f9a879..1c89e243e7d 100644 --- a/networkx/algorithms/smallworld.py +++ b/networkx/algorithms/smallworld.py @@ -23,7 +23,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def random_reference(G, niter=1, connectivity=True, seed=None): """Compute a random graph by swapping edges of a given graph. 
@@ -121,7 +121,7 @@ def random_reference(G, niter=1, connectivity=True, seed=None): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(4) -@nx._dispatch +@nx._dispatchable def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None): """Latticize the given graph by swapping edges. @@ -245,7 +245,7 @@ def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def sigma(G, niter=100, nrand=10, seed=None): """Returns the small-world coefficient (sigma) of the given graph. @@ -314,7 +314,7 @@ def sigma(G, niter=100, nrand=10, seed=None): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def omega(G, niter=5, nrand=10, seed=None): """Returns the small-world coefficient (omega) of a graph diff --git a/networkx/algorithms/smetric.py b/networkx/algorithms/smetric.py index 80ae314bbdd..5a27014ee55 100644 --- a/networkx/algorithms/smetric.py +++ b/networkx/algorithms/smetric.py @@ -3,7 +3,7 @@ __all__ = ["s_metric"] -@nx._dispatch +@nx._dispatchable def s_metric(G, **kwargs): """Returns the s-metric [1]_ of graph. diff --git a/networkx/algorithms/sparsifiers.py b/networkx/algorithms/sparsifiers.py index 5890a5b1e9a..fedc16cdd2d 100644 --- a/networkx/algorithms/sparsifiers.py +++ b/networkx/algorithms/sparsifiers.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def spanner(G, stretch, weight=None, seed=None): """Returns a spanner of the given graph with the given stretch. 
diff --git a/networkx/algorithms/structuralholes.py b/networkx/algorithms/structuralholes.py index 6d4f33171dd..bae42d060af 100644 --- a/networkx/algorithms/structuralholes.py +++ b/networkx/algorithms/structuralholes.py @@ -5,7 +5,7 @@ __all__ = ["constraint", "local_constraint", "effective_size"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def mutual_weight(G, u, v, weight=None): """Returns the sum of the weights of the edge from `u` to `v` and the edge from `v` to `u` in `G`. @@ -28,7 +28,7 @@ def mutual_weight(G, u, v, weight=None): return a_uv + a_vu -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def normalized_mutual_weight(G, u, v, norm=sum, weight=None): """Returns normalized mutual weight of the edges from `u` to `v` with respect to the mutual weights of the neighbors of `u` in `G`. @@ -49,7 +49,7 @@ def normalized_mutual_weight(G, u, v, norm=sum, weight=None): return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def effective_size(G, nodes=None, weight=None): r"""Returns the effective size of all nodes in the graph ``G``. @@ -162,7 +162,7 @@ def redundancy(G, u, v, weight=None): return effective_size -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def constraint(G, nodes=None, weight=None): r"""Returns the constraint on all nodes in the graph ``G``. @@ -223,7 +223,7 @@ def constraint(G, nodes=None, weight=None): return constraint -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def local_constraint(G, u, v, weight=None): r"""Returns the local constraint on the node ``u`` with respect to the node ``v`` in the graph ``G``. 
diff --git a/networkx/algorithms/summarization.py b/networkx/algorithms/summarization.py index 236810f0d98..f3364e32ee7 100644 --- a/networkx/algorithms/summarization.py +++ b/networkx/algorithms/summarization.py @@ -65,7 +65,7 @@ __all__ = ["dedensify", "snap_aggregation"] -@nx._dispatch +@nx._dispatchable def dedensify(G, threshold, prefix=None, copy=True): """Compresses neighborhoods around high-degree nodes @@ -404,7 +404,7 @@ def _snap_split(groups, neighbor_info, group_lookup, group_id): return groups -@nx._dispatch(node_attrs="[node_attributes]", edge_attrs="[edge_attributes]") +@nx._dispatchable(node_attrs="[node_attributes]", edge_attrs="[edge_attributes]") def snap_aggregation( G, node_attributes, diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py index 926be49831e..1b6133e4a69 100644 --- a/networkx/algorithms/swap.py +++ b/networkx/algorithms/swap.py @@ -11,7 +11,7 @@ @nx.utils.not_implemented_for("undirected") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): """Swap three edges in a directed graph while keeping the node degrees fixed. @@ -131,7 +131,7 @@ def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): @py_random_state(3) -@nx._dispatch +@nx._dispatchable def double_edge_swap(G, nswap=1, max_tries=100, seed=None): """Swap two edges in the graph while keeping the node degrees fixed. @@ -229,7 +229,7 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None): @py_random_state(3) -@nx._dispatch +@nx._dispatchable def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None): """Attempts the specified number of double-edge swaps in the graph `G`. 
diff --git a/networkx/algorithms/tests/test_structuralholes.py b/networkx/algorithms/tests/test_structuralholes.py index 6f92baa4f32..215ce4530fa 100644 --- a/networkx/algorithms/tests/test_structuralholes.py +++ b/networkx/algorithms/tests/test_structuralholes.py @@ -52,7 +52,7 @@ def setup_method(self): ("G", "C"): 10, } - # This additionally tests the @nx._dispatch mechanism, treating + # This additionally tests the @nx._dispatchable mechanism, treating # nx.mutual_weight as if it were a re-implementation from another package @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert]) def test_constraint_directed(self, wrapper): diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py index 0839321de0d..1eb35a2c528 100644 --- a/networkx/algorithms/threshold.py +++ b/networkx/algorithms/threshold.py @@ -9,7 +9,7 @@ __all__ = ["is_threshold_graph", "find_threshold_graph"] -@nx._dispatch +@nx._dispatchable def is_threshold_graph(G): """ Returns `True` if `G` is a threshold graph. @@ -301,7 +301,7 @@ def weights_to_creation_sequence( # Manipulating NetworkX.Graphs in context of threshold graphs -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def threshold_graph(creation_sequence, create_using=None): """ Create a threshold graph from the creation sequence or compact @@ -353,7 +353,7 @@ def threshold_graph(creation_sequence, create_using=None): return G -@nx._dispatch +@nx._dispatchable def find_alternating_4_cycle(G): """ Returns False if there aren't any alternating 4 cycles. @@ -369,7 +369,7 @@ def find_alternating_4_cycle(G): return False -@nx._dispatch +@nx._dispatchable def find_threshold_graph(G, create_using=None): """ Returns a threshold subgraph that is close to largest in `G`. 
@@ -404,7 +404,7 @@ def find_threshold_graph(G, create_using=None): return threshold_graph(find_creation_sequence(G), create_using) -@nx._dispatch +@nx._dispatchable def find_creation_sequence(G): """ Find a threshold subgraph that is close to largest in G. diff --git a/networkx/algorithms/time_dependent.py b/networkx/algorithms/time_dependent.py index e83f42ad92c..597a3d157f3 100644 --- a/networkx/algorithms/time_dependent.py +++ b/networkx/algorithms/time_dependent.py @@ -8,7 +8,7 @@ @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs={"time": None, "weight": 1}) +@nx._dispatchable(node_attrs={"time": None, "weight": 1}) def cd_index(G, node, time_delta, *, time="time", weight=None): r"""Compute the CD index for `node` within the graph `G`. diff --git a/networkx/algorithms/tournament.py b/networkx/algorithms/tournament.py index 0b164cb3b16..6946afec799 100644 --- a/networkx/algorithms/tournament.py +++ b/networkx/algorithms/tournament.py @@ -65,7 +65,7 @@ def index_satisfying(iterable, condition): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_tournament(G): """Returns True if and only if `G` is a tournament. @@ -104,7 +104,7 @@ def is_tournament(G): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def hamiltonian_path(G): """Returns a Hamiltonian path in the given tournament graph. @@ -151,7 +151,7 @@ def hamiltonian_path(G): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_tournament(n, seed=None): r"""Returns a random tournament graph on `n` nodes. @@ -186,7 +186,7 @@ def random_tournament(n, seed=None): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def score_sequence(G): """Returns the score sequence for the given tournament graph. 
@@ -217,7 +217,7 @@ def score_sequence(G): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def tournament_matrix(G): r"""Returns the tournament matrix for the given tournament graph. @@ -260,7 +260,7 @@ def tournament_matrix(G): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_reachable(G, s, t): """Decides whether there is a path from `s` to `t` in the tournament. @@ -349,7 +349,7 @@ def is_closed(G, nodes): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(name="tournament_is_strongly_connected") +@nx._dispatchable(name="tournament_is_strongly_connected") def is_strongly_connected(G): """Decides whether the given tournament is strongly connected. diff --git a/networkx/algorithms/traversal/beamsearch.py b/networkx/algorithms/traversal/beamsearch.py index 9b339e306f7..ab90f44aedb 100644 --- a/networkx/algorithms/traversal/beamsearch.py +++ b/networkx/algorithms/traversal/beamsearch.py @@ -4,7 +4,7 @@ __all__ = ["bfs_beam_edges"] -@nx._dispatch +@nx._dispatchable def bfs_beam_edges(G, source, value, width=None): """Iterates over edges in a beam search. diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py index 989c7f8622d..09bc695759a 100644 --- a/networkx/algorithms/traversal/breadth_first_search.py +++ b/networkx/algorithms/traversal/breadth_first_search.py @@ -16,7 +16,7 @@ ] -@nx._dispatch +@nx._dispatchable def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbors=None): """Iterate over edges in a breadth-first search. @@ -130,7 +130,7 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor depth += 1 -@nx._dispatch +@nx._dispatchable def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """Iterate over edges in a breadth-first-search starting at source. 
@@ -218,7 +218,7 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): yield from generic_bfs_edges(G, source, successors, depth_limit) -@nx._dispatch +@nx._dispatchable def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """Returns an oriented tree constructed from of a breadth-first-search starting at source. @@ -285,7 +285,7 @@ def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): return T -@nx._dispatch +@nx._dispatchable def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): """Returns an iterator of predecessors in breadth-first-search from source. @@ -350,7 +350,7 @@ def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): yield (t, s) -@nx._dispatch +@nx._dispatchable def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): """Returns an iterator of successors in breadth-first-search from source. @@ -423,7 +423,7 @@ def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): yield (parent, children) -@nx._dispatch +@nx._dispatchable def bfs_layers(G, sources): """Returns an iterator of all the layers in breadth-first search traversal. @@ -481,7 +481,7 @@ def bfs_layers(G, sources): LEVEL_EDGE = "level" -@nx._dispatch +@nx._dispatchable def bfs_labeled_edges(G, sources): """Iterate over edges in a breadth-first search (BFS) labeled by type. @@ -556,7 +556,7 @@ def bfs_labeled_edges(G, sources): visit(u) -@nx._dispatch +@nx._dispatchable def descendants_at_distance(G, source, distance): """Returns all nodes at a fixed `distance` from `source` in `G`. 
diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py index 185a99c3dd1..843a793aed6 100644 --- a/networkx/algorithms/traversal/depth_first_search.py +++ b/networkx/algorithms/traversal/depth_first_search.py @@ -14,7 +14,7 @@ ] -@nx._dispatch +@nx._dispatchable def dfs_edges(G, source=None, depth_limit=None): """Iterate over edges in a depth-first-search (DFS). @@ -100,7 +100,7 @@ def dfs_edges(G, source=None, depth_limit=None): depth_now -= 1 -@nx._dispatch +@nx._dispatchable def dfs_tree(G, source=None, depth_limit=None): """Returns oriented tree constructed from a depth-first-search from source. @@ -146,7 +146,7 @@ def dfs_tree(G, source=None, depth_limit=None): return T -@nx._dispatch +@nx._dispatchable def dfs_predecessors(G, source=None, depth_limit=None): """Returns dictionary of predecessors in depth-first-search from source. @@ -200,7 +200,7 @@ def dfs_predecessors(G, source=None, depth_limit=None): return {t: s for s, t in dfs_edges(G, source, depth_limit)} -@nx._dispatch +@nx._dispatchable def dfs_successors(G, source=None, depth_limit=None): """Returns dictionary of successors in depth-first-search from source. @@ -257,7 +257,7 @@ def dfs_successors(G, source=None, depth_limit=None): return dict(d) -@nx._dispatch +@nx._dispatchable def dfs_postorder_nodes(G, source=None, depth_limit=None): """Generate nodes in a depth-first-search post-ordering starting at source. @@ -309,7 +309,7 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): return (v for u, v, d in edges if d == "reverse") -@nx._dispatch +@nx._dispatchable def dfs_preorder_nodes(G, source=None, depth_limit=None): """Generate nodes in a depth-first-search pre-ordering starting at source. 
@@ -361,7 +361,7 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): return (v for u, v, d in edges if d == "forward") -@nx._dispatch +@nx._dispatchable def dfs_labeled_edges(G, source=None, depth_limit=None): """Iterate over edges in a depth-first-search (DFS) labeled by type. diff --git a/networkx/algorithms/traversal/edgebfs.py b/networkx/algorithms/traversal/edgebfs.py index c29ef5e0269..484ae12b5ea 100644 --- a/networkx/algorithms/traversal/edgebfs.py +++ b/networkx/algorithms/traversal/edgebfs.py @@ -16,7 +16,7 @@ __all__ = ["edge_bfs"] -@nx._dispatch +@nx._dispatchable def edge_bfs(G, source=None, orientation=None): """A directed, breadth-first-search of edges in `G`, beginning at `source`. diff --git a/networkx/algorithms/traversal/edgedfs.py b/networkx/algorithms/traversal/edgedfs.py index 1e583de6ec4..010f68246c9 100644 --- a/networkx/algorithms/traversal/edgedfs.py +++ b/networkx/algorithms/traversal/edgedfs.py @@ -14,7 +14,7 @@ __all__ = ["edge_dfs"] -@nx._dispatch +@nx._dispatchable def edge_dfs(G, source=None, orientation=None): """A directed, depth-first-search of edges in `G`, beginning at `source`. diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py index 653266915e7..eebc99a751b 100644 --- a/networkx/algorithms/tree/branchings.py +++ b/networkx/algorithms/tree/branchings.py @@ -73,7 +73,7 @@ def _max_weight(weight): return weight -@nx._dispatch(edge_attrs={"attr": "default"}) +@nx._dispatchable(edge_attrs={"attr": "default"}) def branching_weight(G, attr="weight", default=1): """ Returns the total weight of a branching. @@ -108,7 +108,7 @@ def branching_weight(G, attr="weight", default=1): @py_random_state(4) -@nx._dispatch(edge_attrs={"attr": "default"}) +@nx._dispatchable(edge_attrs={"attr": "default"}) def greedy_branching(G, attr="weight", default=1, kind="max", seed=None): """ Returns a branching obtained through a greedy algorithm. 
@@ -745,7 +745,7 @@ def is_root(G, u, edgekeys): return H -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": 0}, preserve_edge_attrs="preserve_attrs", ) @@ -1173,7 +1173,7 @@ def is_root(G, u, edgekeys): return H -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", ) @@ -1194,7 +1194,7 @@ def minimum_branching( return B -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", ) @@ -1258,7 +1258,7 @@ def minimal_branching( return B -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", ) @@ -1300,7 +1300,7 @@ def maximum_spanning_arborescence( return B -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", ) diff --git a/networkx/algorithms/tree/coding.py b/networkx/algorithms/tree/coding.py index a74fd48cff0..55344dbcbcf 100644 --- a/networkx/algorithms/tree/coding.py +++ b/networkx/algorithms/tree/coding.py @@ -32,7 +32,7 @@ class NotATree(nx.NetworkXException): @not_implemented_for("directed") -@nx._dispatch(graphs="T") +@nx._dispatchable(graphs="T") def to_nested_tuple(T, root, canonical_form=False): """Returns a nested tuple representation of the given tree. @@ -128,7 +128,7 @@ def _make_tuple(T, root, _parent): return _make_tuple(T, root, None) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_nested_tuple(sequence, sensible_relabeling=False): """Returns the rooted tree corresponding to the given nested tuple. @@ -213,7 +213,7 @@ def _make_tree(sequence): @not_implemented_for("directed") -@nx._dispatch(graphs="T") +@nx._dispatchable(graphs="T") def to_prufer_sequence(T): r"""Returns the Prüfer sequence of the given tree. 
@@ -314,7 +314,7 @@ def parents(u): return result -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_prufer_sequence(sequence): r"""Returns the tree corresponding to the given Prüfer sequence. diff --git a/networkx/algorithms/tree/decomposition.py b/networkx/algorithms/tree/decomposition.py index 0517100016f..a649288ffe8 100644 --- a/networkx/algorithms/tree/decomposition.py +++ b/networkx/algorithms/tree/decomposition.py @@ -10,7 +10,7 @@ @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def junction_tree(G): r"""Returns a junction tree of a given graph. diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py index ee264d41ae5..8b76bc470ab 100644 --- a/networkx/algorithms/tree/mst.py +++ b/networkx/algorithms/tree/mst.py @@ -41,7 +41,7 @@ class EdgePartition(Enum): @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def boruvka_mst_edges( G, minimum=True, weight="weight", keys=False, data=True, ignore_nan=False ): @@ -138,7 +138,7 @@ def best_edge(component): forest.union(u, v) -@nx._dispatch( +@nx._dispatchable( edge_attrs={"weight": None, "partition": None}, preserve_edge_attrs="data" ) def kruskal_mst_edges( @@ -251,7 +251,7 @@ def kruskal_mst_edges( subtrees.union(u, v) -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan=False): """Iterate over edges of Prim's algorithm min/max spanning tree. 
@@ -367,7 +367,7 @@ def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def minimum_spanning_edges( G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False ): @@ -462,7 +462,7 @@ def minimum_spanning_edges( @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def maximum_spanning_edges( G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False ): @@ -555,7 +555,7 @@ def maximum_spanning_edges( ) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a minimum spanning tree or forest on an undirected graph `G`. @@ -615,7 +615,7 @@ def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=Fa return T -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def partition_spanning_tree( G, minimum=True, weight="weight", partition="partition", ignore_nan=False ): @@ -679,7 +679,7 @@ def partition_spanning_tree( return T -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a maximum spanning tree or forest on an undirected graph `G`. @@ -743,7 +743,7 @@ def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=Fa @py_random_state(3) -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None): """ Sample a random spanning tree using the edges weights of `G`. 
diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py index df1b4e7bec0..14b7c232bcf 100644 --- a/networkx/algorithms/tree/operations.py +++ b/networkx/algorithms/tree/operations.py @@ -32,7 +32,7 @@ def join(rooted_trees, label_attribute=None): # Argument types don't match dispatching, but allow manual selection of backend -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def join_trees(rooted_trees, *, label_attribute=None, first_label=0): """Returns a new rooted tree made by joining `rooted_trees` diff --git a/networkx/algorithms/tree/recognition.py b/networkx/algorithms/tree/recognition.py index 15bbdf7d83b..a9eae98707a 100644 --- a/networkx/algorithms/tree/recognition.py +++ b/networkx/algorithms/tree/recognition.py @@ -79,7 +79,7 @@ @nx.utils.not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_arborescence(G): """ Returns True if `G` is an arborescence. @@ -119,7 +119,7 @@ def is_arborescence(G): @nx.utils.not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_branching(G): """ Returns True if `G` is a branching. @@ -158,7 +158,7 @@ def is_branching(G): return is_forest(G) and max(d for n, d in G.in_degree()) <= 1 -@nx._dispatch +@nx._dispatchable def is_forest(G): """ Returns True if `G` is a forest. @@ -215,7 +215,7 @@ def is_forest(G): return all(len(c) - 1 == c.number_of_edges() for c in components) -@nx._dispatch +@nx._dispatchable def is_tree(G): """ Returns True if `G` is a tree. diff --git a/networkx/algorithms/triads.py b/networkx/algorithms/triads.py index 07172465dae..c2cdf2e8008 100644 --- a/networkx/algorithms/triads.py +++ b/networkx/algorithms/triads.py @@ -129,7 +129,7 @@ def _tricode(G, v, u, w): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def triadic_census(G, nodelist=None): """Determines the triadic census of a directed graph. 
@@ -281,7 +281,7 @@ def triadic_census(G, nodelist=None): return census -@nx._dispatch +@nx._dispatchable def is_triad(G): """Returns True if the graph G is a triad, else False. @@ -312,7 +312,7 @@ def is_triad(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_triplets(G): """Returns a generator of all possible sets of 3 nodes in a DiGraph. @@ -355,7 +355,7 @@ def all_triplets(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_triads(G): """A generator of all possible triads in G. @@ -386,7 +386,7 @@ def all_triads(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def triads_by_type(G): """Returns a list of all triads for each triad type in a directed graph. There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three @@ -449,7 +449,7 @@ def triads_by_type(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def triad_type(G): """Returns the sociological triad type for a triad. @@ -548,7 +548,7 @@ def triad_type(G): @not_implemented_for("undirected") @py_random_state(1) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def random_triad(G, seed=None): """Returns a random triad from a directed graph. diff --git a/networkx/algorithms/vitality.py b/networkx/algorithms/vitality.py index c41efd13f2c..29f98fd1bae 100644 --- a/networkx/algorithms/vitality.py +++ b/networkx/algorithms/vitality.py @@ -8,7 +8,7 @@ __all__ = ["closeness_vitality"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def closeness_vitality(G, node=None, weight=None, wiener_index=None): """Returns the closeness vitality for nodes in the graph. 
diff --git a/networkx/algorithms/voronoi.py b/networkx/algorithms/voronoi.py index af17f013ec8..60c45332339 100644 --- a/networkx/algorithms/voronoi.py +++ b/networkx/algorithms/voronoi.py @@ -5,7 +5,7 @@ __all__ = ["voronoi_cells"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def voronoi_cells(G, center_nodes, weight="weight"): """Returns the Voronoi cells centered at `center_nodes` with respect to the shortest-path distance metric. diff --git a/networkx/algorithms/walks.py b/networkx/algorithms/walks.py index 6f357ce1d42..91214c8e778 100644 --- a/networkx/algorithms/walks.py +++ b/networkx/algorithms/walks.py @@ -6,7 +6,7 @@ __all__ = ["number_of_walks"] -@nx._dispatch +@nx._dispatchable def number_of_walks(G, walk_length): """Returns the number of walks connecting each pair of nodes in `G` diff --git a/networkx/algorithms/wiener.py b/networkx/algorithms/wiener.py index 9e81cdc72ca..dd998ea33c0 100644 --- a/networkx/algorithms/wiener.py +++ b/networkx/algorithms/wiener.py @@ -14,7 +14,7 @@ chaini = chain.from_iterable -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def wiener_index(G, weight=None): """Returns the Wiener index of the given graph. diff --git a/networkx/classes/tests/dispatch_interface.py b/networkx/classes/tests/dispatch_interface.py index 5cef755241b..96c363b93bb 100644 --- a/networkx/classes/tests/dispatch_interface.py +++ b/networkx/classes/tests/dispatch_interface.py @@ -6,7 +6,7 @@ # This is comprehensive, but only tests the `test_override_dispatch` # function in networkx.classes.backends. -# To test the `_dispatch` function directly, several tests scattered throughout +# To test the `_dispatchable` function directly, several tests scattered throughout # NetworkX have been augmented to test normal and dispatch mode. # Searching for `dispatch_interface` should locate the specific tests. 
diff --git a/networkx/classes/tests/test_backends.py b/networkx/classes/tests/test_backends.py index 7adb7009aef..e1a7c1cf6ff 100644 --- a/networkx/classes/tests/test_backends.py +++ b/networkx/classes/tests/test_backends.py @@ -23,8 +23,8 @@ def test_pickle(): @pytest.mark.skipif( - "not nx._dispatch._automatic_backends " - "or nx._dispatch._automatic_backends[0] != 'nx-loopback'" + "not nx._dispatchable._automatic_backends " + "or nx._dispatchable._automatic_backends[0] != 'nx-loopback'" ) def test_graph_converter_needs_backend(): # When testing, `nx.from_scipy_sparse_array` will *always* call the backend diff --git a/networkx/conftest.py b/networkx/conftest.py index 86cea45f944..1f045517575 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -46,11 +46,11 @@ def pytest_configure(config): if backend is None: backend = os.environ.get("NETWORKX_TEST_BACKEND") if backend: - networkx.utils.backends._dispatch._automatic_backends = [backend] + networkx.utils.backends._dispatchable._automatic_backends = [backend] fallback_to_nx = config.getoption("--fallback-to-nx") if not fallback_to_nx: fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX") - networkx.utils.backends._dispatch._fallback_to_nx = bool(fallback_to_nx) + networkx.utils.backends._dispatchable._fallback_to_nx = bool(fallback_to_nx) # nx-loopback backend is only available when testing backends = entry_points(name="nx-loopback", group="networkx.backends") if backends: @@ -69,8 +69,8 @@ def pytest_configure(config): def pytest_collection_modifyitems(config, items): # Setting this to True here allows tests to be set up before dispatching # any function call to a backend. 
- networkx.utils.backends._dispatch._is_testing = True - if automatic_backends := networkx.utils.backends._dispatch._automatic_backends: + networkx.utils.backends._dispatchable._is_testing = True + if automatic_backends := networkx.utils.backends._dispatchable._automatic_backends: # Allow pluggable backends to add markers to tests (such as skip or xfail) # when running in auto-conversion test mode backend = networkx.utils.backends.backends[automatic_backends[0]].load() diff --git a/networkx/convert.py b/networkx/convert.py index 14598024684..f62df458e07 100644 --- a/networkx/convert.py +++ b/networkx/convert.py @@ -176,7 +176,7 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False): raise nx.NetworkXError("Input is not a known data type for conversion.") -@nx._dispatch +@nx._dispatchable def to_dict_of_lists(G, nodelist=None): """Returns adjacency representation of graph as a dictionary of lists. @@ -202,7 +202,7 @@ def to_dict_of_lists(G, nodelist=None): return d -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_dict_of_lists(d, create_using=None): """Returns a graph from a dictionary of lists. @@ -364,7 +364,7 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): return dod -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_dict_of_dicts(d, create_using=None, multigraph_input=False): """Returns a graph from a dictionary of dictionaries. @@ -451,7 +451,7 @@ def from_dict_of_dicts(d, create_using=None, multigraph_input=False): return G -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def to_edgelist(G, nodelist=None): """Returns a list of edges in the graph. @@ -469,7 +469,7 @@ def to_edgelist(G, nodelist=None): return G.edges(nodelist, data=True) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_edgelist(edgelist, create_using=None): """Returns a graph from a list of edges. 
diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py index c731985bb9f..ae756e59934 100644 --- a/networkx/convert_matrix.py +++ b/networkx/convert_matrix.py @@ -43,7 +43,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def to_pandas_adjacency( G, nodelist=None, @@ -150,7 +150,7 @@ def to_pandas_adjacency( return pd.DataFrame(data=M, index=nodelist, columns=nodelist) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_pandas_adjacency(df, create_using=None): r"""Returns a graph from Pandas DataFrame. @@ -219,7 +219,7 @@ def from_pandas_adjacency(df, create_using=None): return G -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def to_pandas_edgelist( G, source="source", @@ -311,7 +311,7 @@ def to_pandas_edgelist( return pd.DataFrame(edgelistdict, dtype=dtype) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_pandas_edgelist( df, source="source", @@ -483,7 +483,7 @@ def from_pandas_edgelist( return g -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"): """Returns the graph adjacency matrix as a SciPy sparse array. 
@@ -675,7 +675,7 @@ def _generate_weighted_edges(A): return _coo_gen_triples(A.tocoo()) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_scipy_sparse_array( A, parallel_edges=False, create_using=None, edge_attribute="weight" ): @@ -780,7 +780,7 @@ def from_scipy_sparse_array( return G -@nx._dispatch(edge_attrs="weight") # edge attrs may also be obtained from `dtype` +@nx._dispatchable(edge_attrs="weight") # edge attrs may also be obtained from `dtype` def to_numpy_array( G, nodelist=None, @@ -1019,7 +1019,7 @@ def to_numpy_array( return A -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_numpy_array(A, parallel_edges=False, create_using=None, edge_attr="weight"): """Returns a graph from a 2D NumPy array. diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py index da0758493f3..1682c62dcf1 100644 --- a/networkx/drawing/nx_agraph.py +++ b/networkx/drawing/nx_agraph.py @@ -33,7 +33,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_agraph(A, create_using=None): """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph. @@ -203,7 +203,7 @@ def write_dot(G, path): return -@nx._dispatch(name="agraph_read_dot", graphs=None) +@nx._dispatchable(name="agraph_read_dot", graphs=None) def read_dot(path): """Returns a NetworkX graph from a dot file on path. diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py index 4e6a4dccf52..928970d83d0 100644 --- a/networkx/drawing/nx_pydot.py +++ b/networkx/drawing/nx_pydot.py @@ -46,7 +46,7 @@ def write_dot(G, path): @open_file(0, mode="r") -@nx._dispatch(name="pydot_read_dot", graphs=None) +@nx._dispatchable(name="pydot_read_dot", graphs=None) def read_dot(path): """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the dot file with the passed path. 
@@ -80,7 +80,7 @@ def read_dot(path): return from_pydot(P_list[0]) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_pydot(P): """Returns a NetworkX graph from a Pydot graph. diff --git a/networkx/generators/atlas.py b/networkx/generators/atlas.py index 8e57ec98b2e..0a34bc466b8 100644 --- a/networkx/generators/atlas.py +++ b/networkx/generators/atlas.py @@ -88,7 +88,7 @@ def _generate_graphs(): yield G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def graph_atlas(i): """Returns graph number `i` from the Graph Atlas. @@ -127,7 +127,7 @@ def graph_atlas(i): return next(islice(_generate_graphs(), i, None)) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def graph_atlas_g(): """Returns the list of all graphs with up to seven nodes named in the Graph Atlas. diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index 4bb660b5a8a..e220f82d8fd 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -65,7 +65,7 @@ def _tree_edges(n, r): break -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def full_rary_tree(r, n, create_using=None): """Creates a full r-ary tree of `n` nodes. @@ -103,7 +103,7 @@ def full_rary_tree(r, n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def balanced_tree(r, h, create_using=None): """Returns the perfectly balanced `r`-ary tree of height `h`. @@ -153,7 +153,7 @@ def balanced_tree(r, h, create_using=None): return full_rary_tree(r, n, create_using=create_using) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def barbell_graph(m1, m2, create_using=None): """Returns the Barbell Graph: two complete graphs connected by a path. @@ -226,7 +226,7 @@ def barbell_graph(m1, m2, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def binomial_tree(n, create_using=None): """Returns the Binomial Tree of order n. 
@@ -264,7 +264,7 @@ def binomial_tree(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number(0) def complete_graph(n, create_using=None): """Return the complete graph `K_n` with n nodes. @@ -312,7 +312,7 @@ def complete_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def circular_ladder_graph(n, create_using=None): """Returns the circular ladder graph $CL_n$ of length n. @@ -332,7 +332,7 @@ def circular_ladder_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def circulant_graph(n, offsets, create_using=None): r"""Returns the circulant graph $Ci_n(x_1, x_2, ..., x_m)$ with $n$ nodes. @@ -409,7 +409,7 @@ def circulant_graph(n, offsets, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number(0) def cycle_graph(n, create_using=None): """Returns the cycle graph $C_n$ of cyclically connected nodes. @@ -441,7 +441,7 @@ def cycle_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def dorogovtsev_goltsev_mendes_graph(n, create_using=None): """Returns the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph. @@ -503,7 +503,7 @@ def dorogovtsev_goltsev_mendes_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number(0) def empty_graph(n=0, create_using=None, default=Graph): """Returns the empty graph with n nodes and zero edges. @@ -602,7 +602,7 @@ def empty_graph(n=0, create_using=None, default=Graph): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def ladder_graph(n, create_using=None): """Returns the Ladder graph of length n. 
@@ -625,7 +625,7 @@ def ladder_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number([0, 1]) def lollipop_graph(m, n, create_using=None): """Returns the Lollipop Graph; ``K_m`` connected to ``P_n``. @@ -692,7 +692,7 @@ def lollipop_graph(m, n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def null_graph(create_using=None): """Returns the Null graph with no nodes or edges. @@ -703,7 +703,7 @@ def null_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number(0) def path_graph(n, create_using=None): """Returns the Path graph `P_n` of linearly connected nodes. @@ -729,7 +729,7 @@ def path_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number(0) def star_graph(n, create_using=None): """Return the star graph @@ -768,7 +768,7 @@ def star_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number([0, 1]) def tadpole_graph(m, n, create_using=None): """Returns the (m,n)-tadpole graph; ``C_m`` connected to ``P_n``. @@ -830,7 +830,7 @@ def tadpole_graph(m, n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def trivial_graph(create_using=None): """Return the Trivial graph with one node (with label 0) and no edges. 
@@ -843,7 +843,7 @@ def trivial_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def turan_graph(n, r): r"""Return the Turan Graph @@ -881,7 +881,7 @@ def turan_graph(n, r): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number(0) def wheel_graph(n, create_using=None): """Return the wheel graph @@ -917,7 +917,7 @@ def wheel_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def complete_multipartite_graph(*subset_sizes): """Returns the complete multipartite graph with the specified subset sizes. diff --git a/networkx/generators/cographs.py b/networkx/generators/cographs.py index cb1f8d71b13..50f38ed76bf 100644 --- a/networkx/generators/cographs.py +++ b/networkx/generators/cographs.py @@ -18,7 +18,7 @@ @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_cograph(n, seed=None): r"""Returns a random cograph with $2 ^ n$ nodes. diff --git a/networkx/generators/community.py b/networkx/generators/community.py index 0873b804958..5293fdf1181 100644 --- a/networkx/generators/community.py +++ b/networkx/generators/community.py @@ -19,7 +19,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def caveman_graph(l, k): """Returns a caveman graph of `l` cliques of size `k`. @@ -66,7 +66,7 @@ def caveman_graph(l, k): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def connected_caveman_graph(l, k): """Returns a connected caveman graph of `l` cliques of size `k`. @@ -121,7 +121,7 @@ def connected_caveman_graph(l, k): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def relaxed_caveman_graph(l, k, p, seed=None): """Returns a relaxed caveman graph. 
@@ -173,7 +173,7 @@ def relaxed_caveman_graph(l, k, p, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): """Returns the random partition graph with a partition of sizes. @@ -252,7 +252,7 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): """Returns the planted l-partition graph. @@ -308,7 +308,7 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): @py_random_state(6) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=None): """Generate a Gaussian random partition graph. @@ -384,7 +384,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=N return random_partition_graph(sizes, p_in, p_out, seed=seed, directed=directed) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def ring_of_cliques(num_cliques, clique_size): """Defines a "ring of cliques" graph. @@ -440,7 +440,7 @@ def ring_of_cliques(num_cliques, clique_size): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def windmill_graph(n, k): """Generate a windmill graph. 
A windmill graph is a graph of `n` cliques each of size `k` that are all @@ -494,7 +494,7 @@ def windmill_graph(n, k): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def stochastic_block_model( sizes, p, nodelist=None, seed=None, directed=False, selfloops=False, sparse=True ): @@ -808,7 +808,7 @@ def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed): @py_random_state(11) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def LFR_benchmark_graph( n, tau1, diff --git a/networkx/generators/degree_seq.py b/networkx/generators/degree_seq.py index fd969110189..0e9414dd881 100644 --- a/networkx/generators/degree_seq.py +++ b/networkx/generators/degree_seq.py @@ -124,7 +124,7 @@ def _configuration_model( @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def configuration_model(deg_sequence, create_using=None, seed=None): """Returns a random graph with the given degree sequence. @@ -227,7 +227,7 @@ def configuration_model(deg_sequence, create_using=None, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def directed_configuration_model( in_degree_sequence, out_degree_sequence, create_using=None, seed=None ): @@ -330,7 +330,7 @@ def directed_configuration_model( @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def expected_degree_graph(w, seed=None, selfloops=True): r"""Returns a random graph with given expected degrees. @@ -439,7 +439,7 @@ def expected_degree_graph(w, seed=None, selfloops=True): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def havel_hakimi_graph(deg_sequence, create_using=None): """Returns a simple graph with given degree sequence constructed using the Havel-Hakimi algorithm. 
@@ -532,7 +532,7 @@ def havel_hakimi_graph(deg_sequence, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): """Returns a directed graph with the given degree sequences. @@ -644,7 +644,7 @@ def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using= return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def degree_sequence_tree(deg_sequence, create_using=None): """Make a tree for the given degree sequence. @@ -691,7 +691,7 @@ def degree_sequence_tree(deg_sequence, create_using=None): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_degree_sequence_graph(sequence, seed=None, tries=10): r"""Returns a simple random graph with the given degree sequence. diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py index e084ebe8c4f..025d7957dcc 100644 --- a/networkx/generators/directed.py +++ b/networkx/generators/directed.py @@ -21,7 +21,7 @@ @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gn_graph(n, kernel=None, create_using=None, seed=None): """Returns the growing network (GN) digraph with `n` nodes. @@ -89,7 +89,7 @@ def kernel(x): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gnr_graph(n, p, create_using=None, seed=None): """Returns the growing network with redirection (GNR) digraph with `n` nodes and redirection probability `p`. @@ -143,7 +143,7 @@ def gnr_graph(n, p, create_using=None, seed=None): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gnc_graph(n, create_using=None, seed=None): """Returns the growing network with copying (GNC) digraph with `n` nodes. 
@@ -183,7 +183,7 @@ def gnc_graph(n, create_using=None, seed=None): @py_random_state(6) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def scale_free_graph( n, alpha=0.41, @@ -334,7 +334,7 @@ def _choose_node(candidates, node_list, delta): @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, seed=None): """Returns a random `k`-out graph with uniform attachment. @@ -415,7 +415,7 @@ def sample(v, nodes): @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): """Returns a random `k`-out graph with preferential attachment. diff --git a/networkx/generators/duplication.py b/networkx/generators/duplication.py index ef7f374bbda..6a2e5d57e07 100644 --- a/networkx/generators/duplication.py +++ b/networkx/generators/duplication.py @@ -13,7 +13,7 @@ @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def partial_duplication_graph(N, n, p, q, seed=None): """Returns a random graph using the partial duplication model. @@ -88,7 +88,7 @@ def partial_duplication_graph(N, n, p, q, seed=None): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def duplication_divergence_graph(n, p, seed=None): """Returns an undirected graph using the duplication-divergence model. diff --git a/networkx/generators/ego.py b/networkx/generators/ego.py index ab9ed257cc8..9803783ae1e 100644 --- a/networkx/generators/ego.py +++ b/networkx/generators/ego.py @@ -6,7 +6,7 @@ import networkx as nx -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None): """Returns induced subgraph of neighbors centered at node n within a given radius. 
diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index 2cd7976025a..33522ef550f 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -47,7 +47,7 @@ # (x, (y + (2*x + 1)) % n), # (x, (y + (2*x + 2)) % n), # -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def margulis_gabber_galil_graph(n, create_using=None): r"""Returns the Margulis-Gabber-Galil undirected MultiGraph on `n^2` nodes. @@ -90,7 +90,7 @@ def margulis_gabber_galil_graph(n, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def chordal_cycle_graph(p, create_using=None): """Returns the chordal cycle graph on `p` nodes. @@ -154,7 +154,7 @@ def chordal_cycle_graph(p, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def paley_graph(p, create_using=None): r"""Returns the Paley $\frac{(p-1)}{2}$ -regular graph on $p$ nodes. diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py index 052d96e3a71..4035a1ea5c5 100644 --- a/networkx/generators/geometric.py +++ b/networkx/generators/geometric.py @@ -20,7 +20,7 @@ ] -@nx._dispatch(node_attrs="pos_name") +@nx._dispatchable(node_attrs="pos_name") def geometric_edges(G, radius, p=2, *, pos_name="pos"): """Returns edge list of node pairs within `radius` of each other. 
@@ -111,7 +111,7 @@ def _geometric_edges(G, radius, p, pos_name): @py_random_state(5) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_geometric_graph( n, radius, dim=2, pos=None, p=2, seed=None, *, pos_name="pos" ): @@ -205,7 +205,7 @@ def random_geometric_graph( @py_random_state(6) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def soft_random_geometric_graph( n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None, *, pos_name="pos" ): @@ -335,7 +335,7 @@ def should_join(edge): @py_random_state(7) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def geographical_threshold_graph( n, theta, @@ -502,7 +502,7 @@ def should_join(pair): @py_random_state(6) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def waxman_graph( n, beta=0.4, @@ -635,7 +635,7 @@ def should_join(pair): @py_random_state(5) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): r"""Returns a navigable small-world graph. @@ -708,7 +708,7 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): @py_random_state(7) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def thresholded_random_geometric_graph( n, radius, @@ -848,7 +848,7 @@ def thresholded_random_geometric_graph( @py_random_state(5) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def geometric_soft_configuration_graph( *, beta, n=None, gamma=None, mean_degree=None, kappas=None, seed=None ): diff --git a/networkx/generators/harary_graph.py b/networkx/generators/harary_graph.py index c752fab05f6..250f98985d6 100644 --- a/networkx/generators/harary_graph.py +++ b/networkx/generators/harary_graph.py @@ -21,7 +21,7 @@ __all__ = ["hnm_harary_graph", "hkn_harary_graph"] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def hnm_harary_graph(n, m, create_using=None): """Returns the Harary graph with given numbers of nodes and edges. 
@@ -113,7 +113,7 @@ def hnm_harary_graph(n, m, create_using=None): return H -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def hkn_harary_graph(k, n, create_using=None): """Returns the Harary graph with given node connectivity and node number. diff --git a/networkx/generators/internet_as_graphs.py b/networkx/generators/internet_as_graphs.py index 90e90203e0b..ad45b23d93e 100644 --- a/networkx/generators/internet_as_graphs.py +++ b/networkx/generators/internet_as_graphs.py @@ -397,7 +397,7 @@ def generate(self): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_internet_as_graph(n, seed=None): """Generates a random undirected graph resembling the Internet AS network diff --git a/networkx/generators/intersection.py b/networkx/generators/intersection.py index 1bbcf156b04..be88448f066 100644 --- a/networkx/generators/intersection.py +++ b/networkx/generators/intersection.py @@ -12,7 +12,7 @@ @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def uniform_random_intersection_graph(n, m, p, seed=None): """Returns a uniform random intersection graph. @@ -48,7 +48,7 @@ def uniform_random_intersection_graph(n, m, p, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def k_random_intersection_graph(n, m, k, seed=None): """Returns a intersection graph with randomly chosen attribute sets for each node that are of equal size (k). @@ -84,7 +84,7 @@ def k_random_intersection_graph(n, m, k, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def general_random_intersection_graph(n, m, p, seed=None): """Returns a random intersection graph with independent probabilities for connections between node and attribute sets. 
diff --git a/networkx/generators/interval_graph.py b/networkx/generators/interval_graph.py index c0efb34508a..6373a24b34c 100644 --- a/networkx/generators/interval_graph.py +++ b/networkx/generators/interval_graph.py @@ -8,7 +8,7 @@ __all__ = ["interval_graph"] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def interval_graph(intervals): """Generates an interval graph for a list of intervals given. diff --git a/networkx/generators/joint_degree_seq.py b/networkx/generators/joint_degree_seq.py index fd2d56eba5f..7ad03288c86 100644 --- a/networkx/generators/joint_degree_seq.py +++ b/networkx/generators/joint_degree_seq.py @@ -11,7 +11,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_valid_joint_degree(joint_degrees): """Checks whether the given joint degree dictionary is realizable. @@ -142,7 +142,7 @@ def _neighbor_switch(G, w, unsat, h_node_residual, avoid_node_id=None): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def joint_degree_graph(joint_degrees, seed=None): """Generates a random simple graph with the given joint degree dictionary. @@ -285,7 +285,7 @@ def joint_degree_graph(joint_degrees, seed=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): """Checks whether the given directed joint degree input is realizable @@ -469,7 +469,7 @@ def _directed_neighbor_switch_rev( @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): """Generates a random simple directed graph with the joint degree. 
diff --git a/networkx/generators/lattice.py b/networkx/generators/lattice.py index ba1f204af14..c046b8e9b11 100644 --- a/networkx/generators/lattice.py +++ b/networkx/generators/lattice.py @@ -32,7 +32,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) @nodes_or_number([0, 1]) def grid_2d_graph(m, n, periodic=False, create_using=None): """Returns the two-dimensional grid graph. @@ -86,7 +86,7 @@ def grid_2d_graph(m, n, periodic=False, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def grid_graph(dim, periodic=False): """Returns the *n*-dimensional grid graph. @@ -143,7 +143,7 @@ def grid_graph(dim, periodic=False): return H -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def hypercube_graph(n): """Returns the *n*-dimensional hypercube graph. @@ -170,7 +170,7 @@ def hypercube_graph(n): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def triangular_lattice_graph( m, n, periodic=False, with_positions=True, create_using=None ): @@ -271,7 +271,7 @@ def triangular_lattice_graph( return H -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def hexagonal_lattice_graph( m, n, periodic=False, with_positions=True, create_using=None ): diff --git a/networkx/generators/line.py b/networkx/generators/line.py index ccee9605584..83c6481fe43 100644 --- a/networkx/generators/line.py +++ b/networkx/generators/line.py @@ -10,7 +10,7 @@ __all__ = ["line_graph", "inverse_line_graph"] -@nx._dispatch +@nx._dispatchable def line_graph(G, create_using=None): r"""Returns the line graph of the graph or digraph `G`. @@ -215,7 +215,7 @@ def _lg_undirected(G, selfloops=False, create_using=None): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def inverse_line_graph(G): """Returns the inverse line graph of graph G. 
diff --git a/networkx/generators/mycielski.py b/networkx/generators/mycielski.py index 6e966d1fecb..492be58fd8e 100644 --- a/networkx/generators/mycielski.py +++ b/networkx/generators/mycielski.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def mycielskian(G, iterations=1): r"""Returns the Mycielskian of a simple, undirected graph G @@ -68,7 +68,7 @@ def mycielskian(G, iterations=1): return M -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def mycielski_graph(n): """Generator for the n_th Mycielski Graph. diff --git a/networkx/generators/nonisomorphic_trees.py b/networkx/generators/nonisomorphic_trees.py index ae74b09c9cc..6f5d6412361 100644 --- a/networkx/generators/nonisomorphic_trees.py +++ b/networkx/generators/nonisomorphic_trees.py @@ -12,7 +12,7 @@ import networkx as nx -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def nonisomorphic_trees(order, create="graph"): """Returns a list of nonisomorphic trees @@ -52,7 +52,7 @@ def nonisomorphic_trees(order, create="graph"): layout = _next_rooted_tree(layout) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def number_of_nonisomorphic_trees(order): """Returns the number of nonisomorphic trees diff --git a/networkx/generators/random_clustered.py b/networkx/generators/random_clustered.py index 676346ab9df..abd7a8c9c6e 100644 --- a/networkx/generators/random_clustered.py +++ b/networkx/generators/random_clustered.py @@ -7,7 +7,7 @@ @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None): r"""Generate a random graph with the given joint independent edge degree and triangle degree sequence. 
diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py index e08c4ce7d1f..dc65a962123 100644 --- a/networkx/generators/random_graphs.py +++ b/networkx/generators/random_graphs.py @@ -37,7 +37,7 @@ @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def fast_gnp_random_graph(n, p, seed=None, directed=False): """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or a binomial graph. @@ -109,7 +109,7 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gnp_random_graph(n, p, seed=None, directed=False): """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or a binomial graph. @@ -174,7 +174,7 @@ def gnp_random_graph(n, p, seed=None, directed=False): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def dense_gnm_random_graph(n, m, seed=None): """Returns a $G_{n,m}$ random graph. @@ -236,7 +236,7 @@ def dense_gnm_random_graph(n, m, seed=None): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def gnm_random_graph(n, m, seed=None, directed=False): """Returns a $G_{n,m}$ random graph. @@ -292,7 +292,7 @@ def gnm_random_graph(n, m, seed=None, directed=False): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def newman_watts_strogatz_graph(n, k, p, seed=None): """Returns a Newman–Watts–Strogatz small-world graph. @@ -363,7 +363,7 @@ def newman_watts_strogatz_graph(n, k, p, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def watts_strogatz_graph(n, k, p, seed=None): """Returns a Watts–Strogatz small-world graph. 
@@ -438,7 +438,7 @@ def watts_strogatz_graph(n, k, p, seed=None): @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): """Returns a connected Watts–Strogatz small-world graph. @@ -491,7 +491,7 @@ def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_regular_graph(d, n, seed=None): r"""Returns a random $d$-regular graph on $n$ nodes. @@ -622,7 +622,7 @@ def _random_subset(seq, m, rng): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def barabasi_albert_graph(n, m, seed=None, initial_graph=None): """Returns a random graph using Barabási–Albert preferential attachment @@ -695,7 +695,7 @@ def barabasi_albert_graph(n, m, seed=None, initial_graph=None): @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None): """Returns a random graph using dual Barabási–Albert preferential attachment @@ -795,7 +795,7 @@ def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None): @py_random_state(4) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def extended_barabasi_albert_graph(n, m, p, q, seed=None): """Returns an extended Barabási–Albert model graph. @@ -956,7 +956,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def powerlaw_cluster_graph(n, m, p, seed=None): """Holme and Kim algorithm for growing graphs with powerlaw degree distribution and approximate average clustering. @@ -1046,7 +1046,7 @@ def powerlaw_cluster_graph(n, m, p, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_lobster(n, p1, p2, seed=None): """Returns a random lobster graph. 
@@ -1097,7 +1097,7 @@ def random_lobster(n, p1, p2, seed=None): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_shell_graph(constructor, seed=None): """Returns a random shell graph for the constructor given. @@ -1155,7 +1155,7 @@ def random_shell_graph(constructor, seed=None): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): """Returns a tree with a power law degree distribution. @@ -1192,7 +1192,7 @@ def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): @py_random_state(2) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): """Returns a degree sequence for a tree with a power law distribution. @@ -1249,7 +1249,7 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): r"""Returns an random graph based on the specified kernel. diff --git a/networkx/generators/small.py b/networkx/generators/small.py index 77109551c19..0efa62bc521 100644 --- a/networkx/generators/small.py +++ b/networkx/generators/small.py @@ -59,7 +59,7 @@ def wrapper(*args, **kwargs): return wrapper -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def LCF_graph(n, shift_list, repeats, create_using=None): """ Return the cubic graph specified in LCF notation. 
@@ -126,7 +126,7 @@ def LCF_graph(n, shift_list, repeats, create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def bull_graph(create_using=None): """ Returns the Bull Graph @@ -160,7 +160,7 @@ def bull_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def chvatal_graph(create_using=None): """ Returns the Chvátal Graph @@ -205,7 +205,7 @@ def chvatal_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def cubical_graph(create_using=None): """ Returns the 3-regular Platonic Cubical Graph @@ -248,7 +248,7 @@ def cubical_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def desargues_graph(create_using=None): """ Returns the Desargues Graph @@ -279,7 +279,7 @@ def desargues_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def diamond_graph(create_using=None): """ Returns the Diamond graph @@ -308,7 +308,7 @@ def diamond_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def dodecahedral_graph(create_using=None): """ Returns the Platonic Dodecahedral graph. @@ -339,7 +339,7 @@ def dodecahedral_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def frucht_graph(create_using=None): """ Returns the Frucht Graph. @@ -386,7 +386,7 @@ def frucht_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def heawood_graph(create_using=None): """ Returns the Heawood Graph, a (3,6) cage. @@ -420,7 +420,7 @@ def heawood_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def hoffman_singleton_graph(): """ Returns the Hoffman-Singleton Graph. 
@@ -464,7 +464,7 @@ def hoffman_singleton_graph(): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def house_graph(create_using=None): """ Returns the House graph (square with triangle on top) @@ -495,7 +495,7 @@ def house_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def house_x_graph(create_using=None): """ Returns the House graph with a cross inside the house square. @@ -525,7 +525,7 @@ def house_x_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def icosahedral_graph(create_using=None): """ Returns the Platonic Icosahedral graph. @@ -568,7 +568,7 @@ def icosahedral_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def krackhardt_kite_graph(create_using=None): """ Returns the Krackhardt Kite Social Network. @@ -618,7 +618,7 @@ def krackhardt_kite_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def moebius_kantor_graph(create_using=None): """ Returns the Moebius-Kantor graph. @@ -648,7 +648,7 @@ def moebius_kantor_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def octahedral_graph(create_using=None): """ Returns the Platonic Octahedral graph. @@ -683,7 +683,7 @@ def octahedral_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def pappus_graph(): """ Returns the Pappus graph. @@ -707,7 +707,7 @@ def pappus_graph(): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def petersen_graph(create_using=None): """ Returns the Petersen graph. @@ -751,7 +751,7 @@ def petersen_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def sedgewick_maze_graph(create_using=None): """ Return a small maze with a cycle. 
@@ -784,7 +784,7 @@ def sedgewick_maze_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def tetrahedral_graph(create_using=None): """ Returns the 3-regular Platonic Tetrahedral graph. @@ -814,7 +814,7 @@ def tetrahedral_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def truncated_cube_graph(create_using=None): """ Returns the skeleton of the truncated cube. @@ -872,7 +872,7 @@ def truncated_cube_graph(create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def truncated_tetrahedron_graph(create_using=None): """ Returns the skeleton of the truncated Platonic tetrahedron. @@ -903,7 +903,7 @@ def truncated_tetrahedron_graph(create_using=None): @_raise_on_directed -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def tutte_graph(create_using=None): """ Returns the Tutte graph. diff --git a/networkx/generators/social.py b/networkx/generators/social.py index 5391c834acf..5d950649c3e 100644 --- a/networkx/generators/social.py +++ b/networkx/generators/social.py @@ -11,7 +11,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def karate_club_graph(): """Returns Zachary's Karate Club graph. @@ -93,7 +93,7 @@ def karate_club_graph(): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def davis_southern_women_graph(): """Returns Davis Southern women social network. @@ -244,7 +244,7 @@ def davis_southern_women_graph(): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def florentine_families_graph(): """Returns Florentine families graph. @@ -278,7 +278,7 @@ def florentine_families_graph(): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def les_miserables_graph(): """Returns coappearance network of characters in the novel Les Miserables. 
diff --git a/networkx/generators/spectral_graph_forge.py b/networkx/generators/spectral_graph_forge.py index 0371b68f59b..cdd1250fa18 100644 --- a/networkx/generators/spectral_graph_forge.py +++ b/networkx/generators/spectral_graph_forge.py @@ -8,7 +8,7 @@ @np_random_state(3) -@nx._dispatch +@nx._dispatchable def spectral_graph_forge(G, alpha, transformation="identity", seed=None): """Returns a random simple graph with spectrum resembling that of `G` diff --git a/networkx/generators/stochastic.py b/networkx/generators/stochastic.py index b8084532bc3..75d9c0b25b3 100644 --- a/networkx/generators/stochastic.py +++ b/networkx/generators/stochastic.py @@ -11,7 +11,7 @@ @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def stochastic_graph(G, copy=True, weight="weight"): """Returns a right-stochastic representation of directed graph `G`. diff --git a/networkx/generators/sudoku.py b/networkx/generators/sudoku.py index bc91505b179..c8ac57c48c0 100644 --- a/networkx/generators/sudoku.py +++ b/networkx/generators/sudoku.py @@ -47,7 +47,7 @@ __all__ = ["sudoku_graph"] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def sudoku_graph(n=3): """Returns the n-Sudoku graph. The default value of n is 3. diff --git a/networkx/generators/time_series.py b/networkx/generators/time_series.py index 7373f8edc38..ac060627bac 100644 --- a/networkx/generators/time_series.py +++ b/networkx/generators/time_series.py @@ -8,7 +8,7 @@ __all__ = ["visibility_graph"] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def visibility_graph(series): """ Return a Visibility Graph of an input Time Series. 
diff --git a/networkx/generators/trees.py b/networkx/generators/trees.py index a24f8994c5f..60c2fc640e3 100644 --- a/networkx/generators/trees.py +++ b/networkx/generators/trees.py @@ -48,7 +48,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def prefix_tree(paths): """Creates a directed prefix tree from a list of paths. @@ -181,7 +181,7 @@ def get_children(parent, paths): return tree -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def prefix_tree_recursive(paths): """Recursively creates a directed prefix tree from a list of paths. @@ -324,7 +324,7 @@ def _helper(paths, root, tree): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_tree(n, seed=None, create_using=None): """Returns a uniformly random tree on `n` nodes. @@ -426,7 +426,7 @@ def random_tree(n, seed=None, create_using=None): @py_random_state("seed") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_labeled_tree(n, *, seed=None): """Returns a labeled tree on `n` nodes chosen uniformly at random. @@ -462,7 +462,7 @@ def random_labeled_tree(n, *, seed=None): @py_random_state("seed") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_labeled_rooted_tree(n, *, seed=None): """Returns a labeled rooted tree with `n` nodes. @@ -499,7 +499,7 @@ def random_labeled_rooted_tree(n, *, seed=None): @py_random_state("seed") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_labeled_rooted_forest(n, *, seed=None): """Returns a labeled rooted forest with `n` nodes. 
@@ -737,7 +737,7 @@ def _random_unlabeled_rooted_tree(n, cache_trees, seed): @py_random_state("seed") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_unlabeled_rooted_tree(n, *, number_of_trees=None, seed=None): """Returns a number of unlabeled rooted trees uniformly at random @@ -922,7 +922,7 @@ def _random_unlabeled_rooted_forest(n, q, cache_trees, cache_forests, seed): @py_random_state("seed") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_unlabeled_rooted_forest(n, *, q=None, number_of_forests=None, seed=None): """Returns a forest or list of forests selected at random. @@ -1099,7 +1099,7 @@ def _random_unlabeled_tree(n, cache_trees, cache_forests, seed): @py_random_state("seed") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def random_unlabeled_tree(n, *, number_of_trees=None, seed=None): """Returns a tree or list of trees chosen randomly. diff --git a/networkx/generators/triads.py b/networkx/generators/triads.py index 86d522bba4a..40b0f82a210 100644 --- a/networkx/generators/triads.py +++ b/networkx/generators/triads.py @@ -33,7 +33,7 @@ } -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def triad_graph(triad_name): """Returns the triad graph with the given name. 
diff --git a/networkx/linalg/algebraicconnectivity.py b/networkx/linalg/algebraicconnectivity.py index b70d204a84d..70f23bbac97 100644 --- a/networkx/linalg/algebraicconnectivity.py +++ b/networkx/linalg/algebraicconnectivity.py @@ -312,7 +312,7 @@ def find_fiedler(L, x, normalized, tol, seed): @not_implemented_for("directed") @np_random_state(5) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def algebraic_connectivity( G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None ): @@ -408,7 +408,7 @@ def algebraic_connectivity( @not_implemented_for("directed") @np_random_state(5) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def fiedler_vector( G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None ): @@ -505,7 +505,7 @@ def fiedler_vector( @np_random_state(5) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def spectral_ordering( G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None ): @@ -588,7 +588,7 @@ def spectral_ordering( return order -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def spectral_bisection( G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None ): diff --git a/networkx/linalg/attrmatrix.py b/networkx/linalg/attrmatrix.py index eb6e57c857c..4882c35af4b 100644 --- a/networkx/linalg/attrmatrix.py +++ b/networkx/linalg/attrmatrix.py @@ -142,7 +142,7 @@ def value(u, v): return value -@nx._dispatch(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def attr_matrix( G, edge_attr=None, @@ -306,7 +306,7 @@ def attr_matrix( return M -@nx._dispatch(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def attr_sparse_matrix( G, edge_attr=None, node_attr=None, normalized=False, 
rc_order=None, dtype=None ): diff --git a/networkx/linalg/bethehessianmatrix.py b/networkx/linalg/bethehessianmatrix.py index 542fd1c6712..382e5181047 100644 --- a/networkx/linalg/bethehessianmatrix.py +++ b/networkx/linalg/bethehessianmatrix.py @@ -7,7 +7,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def bethe_hessian_matrix(G, r=None, nodelist=None): r"""Returns the Bethe Hessian matrix of G. diff --git a/networkx/linalg/graphmatrix.py b/networkx/linalg/graphmatrix.py index 9dece36b48c..640fccc6e2e 100644 --- a/networkx/linalg/graphmatrix.py +++ b/networkx/linalg/graphmatrix.py @@ -6,7 +6,7 @@ __all__ = ["incidence_matrix", "adjacency_matrix"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def incidence_matrix( G, nodelist=None, edgelist=None, oriented=False, weight=None, *, dtype=None ): @@ -104,7 +104,7 @@ def incidence_matrix( return A.asformat("csc") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def adjacency_matrix(G, nodelist=None, dtype=None, weight="weight"): """Returns adjacency matrix of G. diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index bd9e1d3c45f..73ce45e4ce4 100644 --- a/networkx/linalg/laplacianmatrix.py +++ b/networkx/linalg/laplacianmatrix.py @@ -19,7 +19,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def laplacian_matrix(G, nodelist=None, weight="weight"): """Returns the Laplacian matrix of G. @@ -108,7 +108,7 @@ def laplacian_matrix(G, nodelist=None, weight="weight"): return D - A -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): r"""Returns the normalized Laplacian matrix of G. 
@@ -207,7 +207,7 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): return DH @ (L @ DH) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def total_spanning_tree_weight(G, weight=None): """ Returns the total weight of all spanning trees of `G`. @@ -246,7 +246,7 @@ def total_spanning_tree_weight(G, weight=None): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def directed_laplacian_matrix( G, nodelist=None, weight="weight", walk_type=None, alpha=0.95 ): @@ -345,7 +345,7 @@ def directed_laplacian_matrix( @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def directed_combinatorial_laplacian_matrix( G, nodelist=None, weight="weight", walk_type=None, alpha=0.95 ): diff --git a/networkx/linalg/modularitymatrix.py b/networkx/linalg/modularitymatrix.py index 03671a1fa10..fc599b35393 100644 --- a/networkx/linalg/modularitymatrix.py +++ b/networkx/linalg/modularitymatrix.py @@ -8,7 +8,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def modularity_matrix(G, nodelist=None, weight=None): r"""Returns the modularity matrix of G. @@ -77,7 +77,7 @@ def modularity_matrix(G, nodelist=None, weight=None): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def directed_modularity_matrix(G, nodelist=None, weight=None): """Returns the directed modularity matrix of G. 
diff --git a/networkx/linalg/spectrum.py b/networkx/linalg/spectrum.py index 979eeabd814..16dfa148c30 100644 --- a/networkx/linalg/spectrum.py +++ b/networkx/linalg/spectrum.py @@ -12,7 +12,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def laplacian_spectrum(G, weight="weight"): """Returns eigenvalues of the Laplacian of G @@ -56,7 +56,7 @@ def laplacian_spectrum(G, weight="weight"): return sp.linalg.eigvalsh(nx.laplacian_matrix(G, weight=weight).todense()) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def normalized_laplacian_spectrum(G, weight="weight"): """Return eigenvalues of the normalized Laplacian of G @@ -90,7 +90,7 @@ def normalized_laplacian_spectrum(G, weight="weight"): ) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def adjacency_spectrum(G, weight="weight"): """Returns eigenvalues of the adjacency matrix of G. @@ -122,7 +122,7 @@ def adjacency_spectrum(G, weight="weight"): return sp.linalg.eigvals(nx.adjacency_matrix(G, weight=weight).todense()) -@nx._dispatch +@nx._dispatchable def modularity_spectrum(G): """Returns eigenvalues of the modularity matrix of G. @@ -153,7 +153,7 @@ def modularity_spectrum(G): return sp.linalg.eigvals(nx.modularity_matrix(G)) -@nx._dispatch +@nx._dispatchable def bethe_hessian_spectrum(G, r=None): """Returns eigenvalues of the Bethe Hessian matrix of G. 
diff --git a/networkx/readwrite/adjlist.py b/networkx/readwrite/adjlist.py index 243550bde2d..a14ad430086 100644 --- a/networkx/readwrite/adjlist.py +++ b/networkx/readwrite/adjlist.py @@ -150,7 +150,7 @@ def write_adjlist(G, path, comments="#", delimiter=" ", encoding="utf-8"): path.write(line.encode(encoding)) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_adjlist( lines, comments="#", delimiter=None, create_using=None, nodetype=None ): @@ -224,7 +224,7 @@ def parse_adjlist( @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_adjlist( path, comments="#", diff --git a/networkx/readwrite/edgelist.py b/networkx/readwrite/edgelist.py index 733b544326c..fcf6ec23013 100644 --- a/networkx/readwrite/edgelist.py +++ b/networkx/readwrite/edgelist.py @@ -173,7 +173,7 @@ def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="ut path.write(line.encode(encoding)) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_edgelist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True ): @@ -298,7 +298,7 @@ def parse_edgelist( @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_edgelist( path, comments="#", @@ -425,7 +425,7 @@ def write_weighted_edgelist(G, path, comments="#", delimiter=" ", encoding="utf- ) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_weighted_edgelist( path, comments="#", diff --git a/networkx/readwrite/gexf.py b/networkx/readwrite/gexf.py index 35be5568fc2..de444c5b31a 100644 --- a/networkx/readwrite/gexf.py +++ b/networkx/readwrite/gexf.py @@ -133,7 +133,7 @@ def generate_gexf(G, encoding="utf-8", prettyprint=True, version="1.2draft"): @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_gexf(path, node_type=None, relabel=False, version="1.2draft"): """Read graph in GEXF format from path. 
diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py index f04c28232ae..f76fc204203 100644 --- a/networkx/readwrite/gml.py +++ b/networkx/readwrite/gml.py @@ -112,7 +112,7 @@ def literal_destringizer(rep): @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_gml(path, label="label", destringizer=None): """Read graph in GML format from `path`. @@ -195,7 +195,7 @@ def filter_lines(lines): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_gml(lines, label="label", destringizer=None): """Parse GML graph from a string or iterable. diff --git a/networkx/readwrite/graph6.py b/networkx/readwrite/graph6.py index ab462b69b85..06578e05ed6 100644 --- a/networkx/readwrite/graph6.py +++ b/networkx/readwrite/graph6.py @@ -60,7 +60,7 @@ def _generate_graph6_bytes(G, nodes, header): yield b"\n" -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_graph6_bytes(bytes_in): """Read a simple undirected graph in graph6 format from bytes. @@ -184,7 +184,7 @@ def to_graph6_bytes(G, nodes=None, header=True): @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_graph6(path): """Read simple undirected graphs in graph6 format from path. diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py index d18c0e8dde9..4de6ca1e882 100644 --- a/networkx/readwrite/graphml.py +++ b/networkx/readwrite/graphml.py @@ -233,7 +233,7 @@ def generate_graphml( @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False): """Read graph in GraphML format from path. 
@@ -306,7 +306,7 @@ def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False) return glist[0] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_graphml( graphml_string, node_type=str, edge_key_type=int, force_multigraph=False ): diff --git a/networkx/readwrite/json_graph/adjacency.py b/networkx/readwrite/json_graph/adjacency.py index 75695d3dff5..8fb24ea031b 100644 --- a/networkx/readwrite/json_graph/adjacency.py +++ b/networkx/readwrite/json_graph/adjacency.py @@ -81,7 +81,7 @@ def adjacency_data(G, attrs=_attrs): return data -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): """Returns graph from adjacency data format. diff --git a/networkx/readwrite/json_graph/cytoscape.py b/networkx/readwrite/json_graph/cytoscape.py index 7689192d471..306c4b4f418 100644 --- a/networkx/readwrite/json_graph/cytoscape.py +++ b/networkx/readwrite/json_graph/cytoscape.py @@ -80,7 +80,7 @@ def cytoscape_data(G, name="name", ident="id"): return jsondata -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def cytoscape_graph(data, name="name", ident="id"): """ Create a NetworkX graph from a dictionary in cytoscape JSON format. 
diff --git a/networkx/readwrite/json_graph/node_link.py b/networkx/readwrite/json_graph/node_link.py index 4a2e8c6d486..a9fbb29b2ad 100644 --- a/networkx/readwrite/json_graph/node_link.py +++ b/networkx/readwrite/json_graph/node_link.py @@ -132,7 +132,7 @@ def node_link_data( return data -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def node_link_graph( data, directed=False, diff --git a/networkx/readwrite/json_graph/tree.py b/networkx/readwrite/json_graph/tree.py index c0b3af18378..bd8679fcab1 100644 --- a/networkx/readwrite/json_graph/tree.py +++ b/networkx/readwrite/json_graph/tree.py @@ -83,7 +83,7 @@ def add_children(n, G): return {**G.nodes[root], ident: root, children: add_children(root, G)} -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def tree_graph(data, ident="id", children="children"): """Returns graph from tree data format. diff --git a/networkx/readwrite/leda.py b/networkx/readwrite/leda.py index 735f2779b4a..d07162b65f3 100644 --- a/networkx/readwrite/leda.py +++ b/networkx/readwrite/leda.py @@ -19,7 +19,7 @@ @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_leda(path, encoding="UTF-8"): """Read graph in LEDA format from path. @@ -46,7 +46,7 @@ def read_leda(path, encoding="UTF-8"): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_leda(lines): """Read graph in LEDA format from string or iterable. 
diff --git a/networkx/readwrite/multiline_adjlist.py b/networkx/readwrite/multiline_adjlist.py index d4156ee3f33..fe542e6a1fd 100644 --- a/networkx/readwrite/multiline_adjlist.py +++ b/networkx/readwrite/multiline_adjlist.py @@ -191,7 +191,7 @@ def write_multiline_adjlist(G, path, delimiter=" ", comments="#", encoding="utf- path.write(multiline.encode(encoding)) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_multiline_adjlist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None ): @@ -301,7 +301,7 @@ def parse_multiline_adjlist( @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_multiline_adjlist( path, comments="#", diff --git a/networkx/readwrite/p2g.py b/networkx/readwrite/p2g.py index 84c72572e91..2afba8c6f4d 100644 --- a/networkx/readwrite/p2g.py +++ b/networkx/readwrite/p2g.py @@ -57,7 +57,7 @@ def write_p2g(G, path, encoding="utf-8"): @open_file(0, mode="r") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_p2g(path, encoding="utf-8"): """Read graph in p2g format from path. @@ -75,7 +75,7 @@ def read_p2g(path, encoding="utf-8"): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_p2g(lines): """Parse p2g format graph from string or iterable. diff --git a/networkx/readwrite/pajek.py b/networkx/readwrite/pajek.py index 0c6050f1507..fef19408001 100644 --- a/networkx/readwrite/pajek.py +++ b/networkx/readwrite/pajek.py @@ -130,7 +130,7 @@ def write_pajek(G, path, encoding="UTF-8"): @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_pajek(path, encoding="UTF-8"): """Read graph in Pajek format from path. @@ -163,7 +163,7 @@ def read_pajek(path, encoding="UTF-8"): return parse_pajek(lines) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def parse_pajek(lines): """Parse Pajek format graph from string or iterable. 
diff --git a/networkx/readwrite/sparse6.py b/networkx/readwrite/sparse6.py index 054f1a0d14c..04fe9e6edc6 100644 --- a/networkx/readwrite/sparse6.py +++ b/networkx/readwrite/sparse6.py @@ -101,7 +101,7 @@ def enc(x): yield b"\n" -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def from_sparse6_bytes(string): """Read an undirected graph in sparse6 format from string. @@ -250,7 +250,7 @@ def to_sparse6_bytes(G, nodes=None, header=True): @open_file(0, mode="rb") -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def read_sparse6(path): """Read an undirected graph in sparse6 format from path. diff --git a/networkx/relabel.py b/networkx/relabel.py index f55695bdfe9..93a404921f9 100644 --- a/networkx/relabel.py +++ b/networkx/relabel.py @@ -3,7 +3,7 @@ __all__ = ["convert_node_labels_to_integers", "relabel_nodes"] -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True) def relabel_nodes(G, mapping, copy=True): """Relabel the nodes of the graph G according to a given mapping. 
@@ -221,7 +221,7 @@ def _relabel_copy(G, mapping): return H -@nx._dispatch( +@nx._dispatchable( preserve_edge_attrs=True, preserve_node_attrs=True, preserve_graph_attrs=True ) def convert_node_labels_to_integers( diff --git a/networkx/utils/backends.py b/networkx/utils/backends.py index 6c0e5649bd3..26cec6d2d83 100644 --- a/networkx/utils/backends.py +++ b/networkx/utils/backends.py @@ -95,7 +95,7 @@ class WrappedSparse: from ..exception import NetworkXNotImplemented -__all__ = ["_dispatch"] +__all__ = ["_dispatchable"] def _get_backends(group, *, load_and_call=False): @@ -141,14 +141,14 @@ def _load_backend(backend_name): _registered_algorithms = {} -class _dispatch: +class _dispatchable: # Allow any of the following decorator forms: - # - @_dispatch - # - @_dispatch() - # - @_dispatch(name="override_name") - # - @_dispatch(graphs="graph") - # - @_dispatch(edge_attrs="weight") - # - @_dispatch(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"}) + # - @_dispatchable + # - @_dispatchable() + # - @_dispatchable(name="override_name") + # - @_dispatchable(graphs="graph") + # - @_dispatchable(edge_attrs="weight") + # - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"}) # These class attributes are currently used to allow backends to run networkx tests. # For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx` @@ -192,7 +192,7 @@ def __new__( argument of the wrapped function. If more than one graph is required for the algorithm (or if the graph is not the first argument), provide a dict of parameter name to argument position for each graph argument. - For example, ``@_dispatch(graphs={"G": 0, "auxiliary?": 4})`` + For example, ``@_dispatchable(graphs={"G": 0, "auxiliary?": 4})`` indicates the 0th parameter ``G`` of the function is a required graph, and the 4th parameter ``auxiliary`` is an optional graph. To indicate an argument is a list of graphs, do e.g. ``"[graphs]"``. 
@@ -235,7 +235,7 @@ def __new__( """ if func is None: return partial( - _dispatch, + _dispatchable, name=name, graphs=graphs, edge_attrs=edge_attrs, @@ -990,10 +990,10 @@ def __reduce__(self): This uses the global registry `_registered_algorithms` to deserialize. """ - return _restore_dispatch, (self.name,) + return _restore_dispatchable, (self.name,) -def _restore_dispatch(name): +def _restore_dispatchable(name): return _registered_algorithms[name] @@ -1003,17 +1003,17 @@ def _restore_dispatch(name): # This doesn't show e.g. `*, backend=None, **backend_kwargs` in the # signatures, which is probably okay. It does allow the docstring to be # updated based on the installed backends. - _orig_dispatch = _dispatch + _orig_dispatchable = _dispatchable - def _dispatch(func=None, **kwargs): # type: ignore[no-redef] + def _dispatchable(func=None, **kwargs): # type: ignore[no-redef] if func is None: - return partial(_dispatch, **kwargs) - dispatched_func = _orig_dispatch(func, **kwargs) + return partial(_dispatchable, **kwargs) + dispatched_func = _orig_dispatchable(func, **kwargs) func.__doc__ = dispatched_func.__doc__ return func - _dispatch.__doc__ = _orig_dispatch.__new__.__doc__ # type: ignore[method-assign,assignment] - _sig = inspect.signature(_orig_dispatch.__new__) - _dispatch.__signature__ = _sig.replace( # type: ignore[method-assign,assignment] + _dispatchable.__doc__ = _orig_dispatchable.__new__.__doc__ # type: ignore[method-assign,assignment] + _sig = inspect.signature(_orig_dispatchable.__new__) + _dispatchable.__signature__ = _sig.replace( # type: ignore[method-assign,assignment] parameters=[v for k, v in _sig.parameters.items() if k != "cls"] ) From e35e98b7fa512aef56243b548a9d078362359764 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Fri, 12 Jan 2024 19:36:16 +0400 Subject: [PATCH 039/129] DOC: build with nx-parallel extra documentation information (#7220) * DOC: build with nx-parallel extra documentation information * also add it to circleci config --- 
.circleci/config.yml | 2 ++ .github/workflows/deploy-docs.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index b7dfef39c00..b9277b86471 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -44,6 +44,8 @@ jobs: pip install git+https://github.com/rapidsai/cugraph.git#subdirectory=python/nx-cugraph --no-deps # Development version of GraphBLAS backend pip install git+https://github.com/python-graphblas/graphblas-algorithms.git@main --no-deps + # Development version of nx-parallel backend + pip install git+https://github.com/networkx/nx-parallel.git@main --no-deps pip list - save_cache: diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index 1bc885907b3..a45e6255bd2 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -38,6 +38,8 @@ jobs: pip install git+https://github.com/rapidsai/cugraph.git#subdirectory=python/nx-cugraph --no-deps # Development version of GraphBLAS backend pip install git+https://github.com/python-graphblas/graphblas-algorithms.git@main --no-deps + # Development version of nx-parallel backend + pip install git+https://github.com/networkx/nx-parallel.git@main --no-deps pip list # To set up a cross-repository deploy key: From fd3cadc48c7277d12f2a27a03b2acb7b9783f783 Mon Sep 17 00:00:00 2001 From: Jangwon Yie Date: Fri, 12 Jan 2024 21:36:52 -0500 Subject: [PATCH 040/129] Add functions to compute Schultz and Gutman Index (#3709) * Add implementaions of Schultz index of first, second kind and their unit tests * Add schultz.rst and update __init__.py * remove an ambiguous variable * move new functions to wiener.py clean up references and docs * Minor docstring touchups. 
--------- Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- doc/reference/algorithms/wiener.rst | 4 +- networkx/algorithms/__init__.py | 2 +- networkx/algorithms/tests/test_wiener.py | 189 +++++++++++++++-------- networkx/algorithms/wiener.py | 189 ++++++++++++++++++++--- 4 files changed, 294 insertions(+), 90 deletions(-) diff --git a/doc/reference/algorithms/wiener.rst b/doc/reference/algorithms/wiener.rst index c0ac446adb8..acd070a99aa 100644 --- a/doc/reference/algorithms/wiener.rst +++ b/doc/reference/algorithms/wiener.rst @@ -1,5 +1,5 @@ ************ -Wiener index +Wiener Index ************ .. automodule:: networkx.algorithms.wiener @@ -7,3 +7,5 @@ Wiener index :toctree: generated/ wiener_index + schultz_index + gutman_index diff --git a/networkx/algorithms/__init__.py b/networkx/algorithms/__init__.py index db6d6cebb19..eda2912cadd 100644 --- a/networkx/algorithms/__init__.py +++ b/networkx/algorithms/__init__.py @@ -37,6 +37,7 @@ from networkx.algorithms.operators import * from networkx.algorithms.planarity import * from networkx.algorithms.planar_drawing import * +from networkx.algorithms.polynomials import * from networkx.algorithms.reciprocity import * from networkx.algorithms.regular import * from networkx.algorithms.richclub import * @@ -57,7 +58,6 @@ from networkx.algorithms.voronoi import * from networkx.algorithms.walks import * from networkx.algorithms.wiener import * -from networkx.algorithms.polynomials import * # Make certain subpackages available to the user as direct imports from # the `networkx` namespace. 
diff --git a/networkx/algorithms/tests/test_wiener.py b/networkx/algorithms/tests/test_wiener.py index 1cb404064fe..aded95143ca 100644 --- a/networkx/algorithms/tests/test_wiener.py +++ b/networkx/algorithms/tests/test_wiener.py @@ -1,66 +1,123 @@ -"""Unit tests for the :mod:`networkx.algorithms.wiener` module.""" - - -from networkx import DiGraph, complete_graph, empty_graph, path_graph, wiener_index - - -class TestWienerIndex: - """Unit tests for computing the Wiener index of a graph.""" - - def test_disconnected_graph(self): - """Tests that the Wiener index of a disconnected graph is - positive infinity. - - """ - assert wiener_index(empty_graph(2)) == float("inf") - - def test_directed(self): - """Tests that each pair of nodes in the directed graph is - counted once when computing the Wiener index. - - """ - G = complete_graph(3) - H = DiGraph(G) - assert (2 * wiener_index(G)) == wiener_index(H) - - def test_complete_graph(self): - """Tests that the Wiener index of the complete graph is simply - the number of edges. - - """ - n = 10 - G = complete_graph(n) - assert wiener_index(G) == (n * (n - 1) / 2) - - def test_path_graph(self): - """Tests that the Wiener index of the path graph is correctly - computed. - - """ - # In P_n, there are n - 1 pairs of vertices at distance one, n - - # 2 pairs at distance two, n - 3 at distance three, ..., 1 at - # distance n - 1, so the Wiener index should be - # - # 1 * (n - 1) + 2 * (n - 2) + ... 
+ (n - 2) * 2 + (n - 1) * 1 - # - # For example, in P_5, - # - # 1 * 4 + 2 * 3 + 3 * 2 + 4 * 1 = 2 (1 * 4 + 2 * 3) - # - # and in P_6, - # - # 1 * 5 + 2 * 4 + 3 * 3 + 4 * 2 + 5 * 1 = 2 (1 * 5 + 2 * 4) + 3 * 3 - # - # assuming n is *odd*, this gives the formula - # - # 2 \sum_{i = 1}^{(n - 1) / 2} [i * (n - i)] - # - # assuming n is *even*, this gives the formula - # - # 2 \sum_{i = 1}^{n / 2} [i * (n - i)] - (n / 2) ** 2 - # - n = 9 - G = path_graph(n) - expected = 2 * sum(i * (n - i) for i in range(1, (n // 2) + 1)) - actual = wiener_index(G) - assert expected == actual +import networkx as nx + + +def test_wiener_index_of_disconnected_graph(): + assert nx.wiener_index(nx.empty_graph(2)) == float("inf") + + +def test_wiener_index_of_directed_graph(): + G = nx.complete_graph(3) + H = nx.DiGraph(G) + assert (2 * nx.wiener_index(G)) == nx.wiener_index(H) + + +def test_wiener_index_of_complete_graph(): + n = 10 + G = nx.complete_graph(n) + assert nx.wiener_index(G) == (n * (n - 1) / 2) + + +def test_wiener_index_of_path_graph(): + # In P_n, there are n - 1 pairs of vertices at distance one, n - + # 2 pairs at distance two, n - 3 at distance three, ..., 1 at + # distance n - 1, so the Wiener index should be + # + # 1 * (n - 1) + 2 * (n - 2) + ... 
+ (n - 2) * 2 + (n - 1) * 1 + # + # For example, in P_5, + # + # 1 * 4 + 2 * 3 + 3 * 2 + 4 * 1 = 2 (1 * 4 + 2 * 3) + # + # and in P_6, + # + # 1 * 5 + 2 * 4 + 3 * 3 + 4 * 2 + 5 * 1 = 2 (1 * 5 + 2 * 4) + 3 * 3 + # + # assuming n is *odd*, this gives the formula + # + # 2 \sum_{i = 1}^{(n - 1) / 2} [i * (n - i)] + # + # assuming n is *even*, this gives the formula + # + # 2 \sum_{i = 1}^{n / 2} [i * (n - i)] - (n / 2) ** 2 + # + n = 9 + G = nx.path_graph(n) + expected = 2 * sum(i * (n - i) for i in range(1, (n // 2) + 1)) + actual = nx.wiener_index(G) + assert expected == actual + + +def test_schultz_and_gutman_index_of_disconnected_graph(): + n = 4 + G = nx.Graph() + G.add_nodes_from(list(range(1, n + 1))) + expected = float("inf") + + G.add_edge(1, 2) + G.add_edge(3, 4) + + actual_1 = nx.schultz_index(G) + actual_2 = nx.gutman_index(G) + + assert expected == actual_1 + assert expected == actual_2 + + +def test_schultz_and_gutman_index_of_complete_bipartite_graph_1(): + n = 3 + m = 3 + cbg = nx.complete_bipartite_graph(n, m) + + expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n + actual_1 = nx.schultz_index(cbg) + + expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n + actual_2 = nx.gutman_index(cbg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_complete_bipartite_graph_2(): + n = 2 + m = 5 + cbg = nx.complete_bipartite_graph(n, m) + + expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n + actual_1 = nx.schultz_index(cbg) + + expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n + actual_2 = nx.gutman_index(cbg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_complete_graph(): + n = 5 + cg = nx.complete_graph(n) + + expected_1 = n * (n - 1) * (n - 1) + actual_1 = nx.schultz_index(cg) + + assert expected_1 == actual_1 + + expected_2 = n * (n - 1) * (n - 1) * (n - 1) 
/ 2 + actual_2 = nx.gutman_index(cg) + + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_odd_cycle_graph(): + k = 5 + n = 2 * k + 1 + ocg = nx.cycle_graph(n) + + expected_1 = 2 * n * k * (k + 1) + actual_1 = nx.schultz_index(ocg) + + expected_2 = 2 * n * k * (k + 1) + actual_2 = nx.gutman_index(ocg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 diff --git a/networkx/algorithms/wiener.py b/networkx/algorithms/wiener.py index dd998ea33c0..471f8df0cf6 100644 --- a/networkx/algorithms/wiener.py +++ b/networkx/algorithms/wiener.py @@ -1,17 +1,25 @@ -"""Functions related to the Wiener index of a graph.""" +"""Functions related to the Wiener Index of a graph. -from itertools import chain +The Wiener Index is a topological measure of a graph +related to the distance between nodes and their degree. +The Schultz Index and Gutman Index are similar measures. +They are used categorize molecules via the network of +atoms connected by chemical bonds. The indices are +correlated with functional aspects of the molecules. -import networkx as nx +References +---------- +.. [1] `Wikipedia: Wiener Index `_ +.. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices, + Croatica Chemica Acta, 71 (1998), 21-51. + https://hrcak.srce.hr/132323 +""" -from .components import is_connected, is_strongly_connected -from .shortest_paths import shortest_path_length as spl +import itertools as it -__all__ = ["wiener_index"] +import networkx as nx -#: Rename the :func:`chain.from_iterable` function for the sake of -#: brevity. -chaini = chain.from_iterable +__all__ = ["wiener_index", "schultz_index", "gutman_index"] @nx._dispatchable(edge_attrs="weight") @@ -19,21 +27,23 @@ def wiener_index(G, weight=None): """Returns the Wiener index of the given graph. The *Wiener index* of a graph is the sum of the shortest-path - distances between each pair of reachable nodes. 
For pairs of nodes - in undirected graphs, only one orientation of the pair is counted. + (weighted) distances between each pair of reachable nodes. + For pairs of nodes in undirected graphs, only one orientation + of the pair is counted. Parameters ---------- G : NetworkX graph - weight : object - The edge attribute to use as distance when computing - shortest-path distances. This is passed directly to the - :func:`networkx.shortest_path_length` function. + weight : string or None, optional (default: None) + If None, every edge has weight 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + The edge weights are used to computing shortest-path distances. Returns ------- - float + number The Wiener index of the graph `G`. Raises @@ -68,12 +78,147 @@ def wiener_index(G, weight=None): >>> nx.wiener_index(G) inf + References + ---------- + .. [1] `Wikipedia: Wiener Index `_ """ - is_directed = G.is_directed() - if (is_directed and not is_strongly_connected(G)) or ( - not is_directed and not is_connected(G) - ): + connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G) + if not connected: return float("inf") - total = sum(chaini(p.values() for v, p in spl(G, weight=weight))) + + spl = nx.shortest_path_length(G, weight=weight) + total = sum(it.chain.from_iterable(nbrs.values() for node, nbrs in spl)) # Need to account for double counting pairs of nodes in undirected graphs. - return total if is_directed else total / 2 + return total if G.is_directed() else total / 2 + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +def schultz_index(G, weight=None): + r"""Returns the Schultz Index (of the first kind) of `G` + + The *Schultz Index* [3]_ of a graph is the sum over all node pairs of + distances times the sum of degrees. Consider an undirected graph `G`. 
+ For each node pair ``(u, v)`` compute ``dist(u, v) * (deg(u) + deg(v)`` + where ``dist`` is the shortest path length between two nodes and ``deg`` + is the degree of a node. + + The Schultz Index is the sum of these quantities over all (unordered) + pairs of nodes. + + Parameters + ---------- + G : NetworkX graph + The undirected graph of interest. + weight : string or None, optional (default: None) + If None, every edge has weight 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + The edge weights are used to computing shortest-path distances. + + Returns + ------- + number + The first kind of Schultz Index of the graph `G`. + + Examples + -------- + The Schultz Index of the (unweighted) complete graph on *n* nodes + equals the number of pairs of the *n* nodes times ``2 * (n - 1)``, + since each pair of nodes is at distance one and the sum of degree + of two nodes is ``2 * (n - 1)``. + + >>> n = 10 + >>> G = nx.complete_graph(n) + >>> nx.schultz_index(G) == (n * (n - 1) / 2) * (2 * (n - 1)) + True + + Graph that is disconnected + + >>> nx.schultz_index(nx.empty_graph(2)) + inf + + References + ---------- + .. [1] I. Gutman, Selected properties of the Schultz molecular topological index, + J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089. + https://doi.org/10.1021/ci00021a009 + .. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices, + Croatica Chemica Acta, 71 (1998), 21-51. + https://hrcak.srce.hr/132323 + .. [3] H. P. Schultz, Topological organic chemistry. 1. + Graph theory and topological indices of alkanes,i + J. Chem. Inf. Comput. Sci. 29 (1989), 239–257. 
+ + """ + if not nx.is_connected(G): + return float("inf") + + spl = nx.shortest_path_length(G, weight=weight) + d = dict(G.degree, weight=weight) + return sum(dist * (d[u] + d[v]) for u, info in spl for v, dist in info.items()) / 2 + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +def gutman_index(G, weight=None): + r"""Returns the Gutman Index for the graph `G`. + + The *Gutman Index* measures the topology of networks, especially for molecule + networks of atoms connected by bonds [1]_. It is also called the Schultz Index + of the second kind [2]_. + + Consider an undirected graph `G` with node set ``V``. + The Gutman Index of a graph is the sum over all (unordered) pairs of nodes + of nodes ``(u, v)``, with distance ``dist(u, v)`` and degrees ``deg(u)`` + and ``deg(v)``, of ``dist(u, v) * deg(u) * deg(v)`` + + Parameters + ---------- + G : NetworkX graph + + weight : string or None, optional (default: None) + If None, every edge has weight 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + The edge weights are used to computing shortest-path distances. + + Returns + ------- + number + The Gutman Index of the graph `G`. + + Examples + -------- + The Gutman Index of the (unweighted) complete graph on *n* nodes + equals the number of pairs of the *n* nodes times ``(n - 1) * (n - 1)``, + since each pair of nodes is at distance one and the product of degree of two + vertices is ``(n - 1) * (n - 1)``. + + >>> n = 10 + >>> G = nx.complete_graph(n) + >>> nx.gutman_index(G) == (n * (n - 1) / 2) * ((n - 1) * (n - 1)) + True + + Graphs that are disconnected + + >>> G = nx.empty_graph(2) + >>> nx.gutman_index(G) + inf + + References + ---------- + .. [1] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices, + Croatica Chemica Acta, 71 (1998), 21-51. + https://hrcak.srce.hr/132323 + .. [2] I. 
Gutman, Selected properties of the Schultz molecular topological index, + J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089. + https://doi.org/10.1021/ci00021a009 + + """ + if not nx.is_connected(G): + return float("inf") + + spl = nx.shortest_path_length(G, weight=weight) + d = dict(G.degree, weight=weight) + return sum(dist * d[u] * d[v] for u, vinfo in spl for v, dist in vinfo.items()) / 2 From b2f98a4cc9d32fbf8f598b5335519e33e0bc3db2 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Sun, 14 Jan 2024 10:38:54 -0800 Subject: [PATCH 041/129] Replace tempfile with tmp_path fixture in test suite. (#7221) * Replace tempfile with tmp_path fixture in test suite. * A few more instances. --- .../bipartite/tests/test_edgelist.py | 46 +++---- networkx/drawing/tests/test_agraph.py | 27 +--- networkx/drawing/tests/test_pydot.py | 17 +-- networkx/readwrite/tests/test_adjlist.py | 86 ++++-------- networkx/readwrite/tests/test_edgelist.py | 56 +++----- networkx/readwrite/tests/test_gml.py | 125 ++++++++---------- networkx/readwrite/tests/test_graph6.py | 5 +- networkx/readwrite/tests/test_graphml.py | 56 +++----- networkx/readwrite/tests/test_pajek.py | 19 +-- networkx/readwrite/tests/test_sparse6.py | 17 +-- 10 files changed, 164 insertions(+), 290 deletions(-) diff --git a/networkx/algorithms/bipartite/tests/test_edgelist.py b/networkx/algorithms/bipartite/tests/test_edgelist.py index b388465ef4b..74035b35e9c 100644 --- a/networkx/algorithms/bipartite/tests/test_edgelist.py +++ b/networkx/algorithms/bipartite/tests/test_edgelist.py @@ -2,8 +2,6 @@ Unit tests for bipartite edgelists. 
""" import io -import os -import tempfile import pytest @@ -101,51 +99,47 @@ def test_write_edgelist_4(self): fh.seek(0) assert fh.read() == b"1 2 2.0\n3 2 3.0\n" - def test_unicode(self): + def test_unicode(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) G.add_node("Radiohead", bipartite=1) - fd, fname = tempfile.mkstemp() + + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_latin1_issue(self): + def test_latin1_issue(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) G.add_node("Radiohead", bipartite=1) - fd, fname = tempfile.mkstemp() - pytest.raises( - UnicodeEncodeError, bipartite.write_edgelist, G, fname, encoding="latin-1" - ) - os.close(fd) - os.unlink(fname) - def test_latin1(self): + fname = tmp_path / "edgelist.txt" + with pytest.raises(UnicodeEncodeError): + bipartite.write_edgelist(G, fname, encoding="latin-1") + + def test_latin1(self, tmp_path): G = nx.Graph() name1 = "Bj" + chr(246) + "rk" name2 = chr(220) + "ber" G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) G.add_node("Radiohead", bipartite=1) - fd, fname = tempfile.mkstemp() + + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname, encoding="latin-1") H = bipartite.read_edgelist(fname, encoding="latin-1") assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_edgelist_graph(self): + def test_edgelist_graph(self, tmp_path): G = self.G - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) H2 = bipartite.read_edgelist(fname) @@ -153,32 +147,26 @@ def 
test_edgelist_graph(self): G.remove_node("g") # isolated nodes are not written in edgelist assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_integers(self): + def test_edgelist_integers(self, tmp_path): G = nx.convert_node_labels_to_integers(self.G) - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname, nodetype=int) # isolated nodes are not written in edgelist G.remove_nodes_from(list(nx.isolates(G))) assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_multigraph(self): + def test_edgelist_multigraph(self, tmp_path): G = self.MG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) def test_empty_digraph(self): with pytest.raises(nx.NetworkXNotImplemented): diff --git a/networkx/drawing/tests/test_agraph.py b/networkx/drawing/tests/test_agraph.py index fd99daaf757..75d5957bf8f 100644 --- a/networkx/drawing/tests/test_agraph.py +++ b/networkx/drawing/tests/test_agraph.py @@ -1,6 +1,4 @@ """Unit tests for PyGraphviz interface.""" -import os -import tempfile import warnings import pytest @@ -25,27 +23,26 @@ def assert_equal(self, G1, G2): assert edges_equal(G1.edges(), G2.edges()) assert G1.graph["metal"] == G2.graph["metal"] - def agraph_checks(self, G): + @pytest.mark.parametrize( + "G", (nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()) + ) + def test_agraph_roundtripping(self, G, tmp_path): G = 
self.build_graph(G) A = nx.nx_agraph.to_agraph(G) H = nx.nx_agraph.from_agraph(A) self.assert_equal(G, H) - fd, fname = tempfile.mkstemp() + fname = tmp_path / "test.dot" nx.drawing.nx_agraph.write_dot(H, fname) Hin = nx.nx_agraph.read_dot(fname) self.assert_equal(H, Hin) - os.close(fd) - os.unlink(fname) - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "fh_test.dot" with open(fname, "w") as fh: nx.drawing.nx_agraph.write_dot(H, fh) with open(fname) as fh: Hin = nx.nx_agraph.read_dot(fh) - os.close(fd) - os.unlink(fname) self.assert_equal(H, Hin) def test_from_agraph_name(self): @@ -75,18 +72,6 @@ def test_from_agraph_named_edges(self): assert isinstance(H, nx.Graph) assert ("0", "1", {"key": "foo"}) in H.edges(data=True) - def test_undirected(self): - self.agraph_checks(nx.Graph()) - - def test_directed(self): - self.agraph_checks(nx.DiGraph()) - - def test_multi_undirected(self): - self.agraph_checks(nx.MultiGraph()) - - def test_multi_directed(self): - self.agraph_checks(nx.MultiDiGraph()) - def test_to_agraph_with_nodedata(self): G = nx.Graph() G.add_node(1, color="red") diff --git a/networkx/drawing/tests/test_pydot.py b/networkx/drawing/tests/test_pydot.py index 7085bbecea3..671afac07c5 100644 --- a/networkx/drawing/tests/test_pydot.py +++ b/networkx/drawing/tests/test_pydot.py @@ -1,6 +1,4 @@ """Unit tests for pydot drawing functions.""" -import os -import tempfile from io import StringIO import pytest @@ -12,7 +10,9 @@ class TestPydot: - def pydot_checks(self, G, prog): + @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) + @pytest.mark.parametrize("prog", ("neato", "dot")) + def test_pydot(self, G, prog, tmp_path): """ Validate :mod:`pydot`-based usage of the passed NetworkX graph with the passed basename of an external GraphViz command (e.g., `dot`, `neato`). @@ -39,7 +39,7 @@ def pydot_checks(self, G, prog): # Validate the original and resulting graphs to be the same. 
assert graphs_equal(G, G2) - fd, fname = tempfile.mkstemp() + fname = tmp_path / "out.dot" # Serialize this "pydot.Dot" instance to a temporary file in dot format P.write_raw(fname) @@ -78,15 +78,6 @@ def pydot_checks(self, G, prog): # Validate the original and resulting graphs to be the same. assert graphs_equal(G, Hin) - os.close(fd) - os.unlink(fname) - - def test_undirected(self): - self.pydot_checks(nx.Graph(), prog="neato") - - def test_directed(self): - self.pydot_checks(nx.DiGraph(), prog="dot") - def test_read_write(self): G = nx.MultiGraph() G.graph["name"] = "G" diff --git a/networkx/readwrite/tests/test_adjlist.py b/networkx/readwrite/tests/test_adjlist.py index aecfb87e940..c8d0f54654d 100644 --- a/networkx/readwrite/tests/test_adjlist.py +++ b/networkx/readwrite/tests/test_adjlist.py @@ -2,8 +2,6 @@ Unit tests for adjlist. """ import io -import os -import tempfile import pytest @@ -36,41 +34,35 @@ def test_read_multiline_adjlist_1(self): adj = {"1": {"3": {}, "2": {}}, "3": {"1": {}}, "2": {"1": {}}} assert graphs_equal(G, nx.Graph(adj)) - def test_unicode(self): + def test_unicode(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) - fd, fname = tempfile.mkstemp() + + fname = tmp_path / "adjlist.txt" nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname) assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_latin1_err(self): + def test_latin1_err(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) - fd, fname = tempfile.mkstemp() - pytest.raises( - UnicodeEncodeError, nx.write_multiline_adjlist, G, fname, encoding="latin-1" - ) - os.close(fd) - os.unlink(fname) + fname = tmp_path / "adjlist.txt" + with pytest.raises(UnicodeEncodeError): + nx.write_multiline_adjlist(G, fname, encoding="latin-1") - def 
test_latin1(self): + def test_latin1(self, tmp_path): G = nx.Graph() name1 = "Bj" + chr(246) + "rk" name2 = chr(220) + "ber" G.add_edge(name1, "Radiohead", **{name2: 3}) - fd, fname = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_multiline_adjlist(G, fname, encoding="latin-1") H = nx.read_multiline_adjlist(fname, encoding="latin-1") assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) def test_parse_adjlist(self): lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] @@ -81,32 +73,28 @@ def test_parse_adjlist(self): with pytest.raises(TypeError): nx.parse_adjlist(lines, nodetype=int) - def test_adjlist_graph(self): + def test_adjlist_graph(self, tmp_path): G = self.G - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_adjlist(G, fname) H = nx.read_adjlist(fname) H2 = nx.read_adjlist(fname) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_adjlist_digraph(self): + def test_adjlist_digraph(self, tmp_path): G = self.DG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_adjlist(G, fname) H = nx.read_adjlist(fname, create_using=nx.DiGraph()) H2 = nx.read_adjlist(fname, create_using=nx.DiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_adjlist_integers(self): - (fd, fname) = tempfile.mkstemp() + def test_adjlist_integers(self, tmp_path): + fname = tmp_path / "adjlist.txt" G = nx.convert_node_labels_to_integers(self.G) nx.write_adjlist(G, fname) H = nx.read_adjlist(fname, nodetype=int) @@ -114,32 +102,26 @@ def test_adjlist_integers(self): assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def 
test_adjlist_multigraph(self): + def test_adjlist_multigraph(self, tmp_path): G = self.XG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_adjlist(G, fname) H = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_adjlist_multidigraph(self): + def test_adjlist_multidigraph(self, tmp_path): G = self.XDG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_adjlist(G, fname) H = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiDiGraph()) H2 = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiDiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) def test_adjlist_delimiter(self): fh = io.BytesIO() @@ -192,32 +174,28 @@ def test_parse_multiline_adjlist(self): with pytest.raises(TypeError): nx.parse_multiline_adjlist(iter(lines)) - def test_multiline_adjlist_graph(self): + def test_multiline_adjlist_graph(self, tmp_path): G = self.G - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname) H2 = nx.read_multiline_adjlist(fname) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_multiline_adjlist_digraph(self): + def test_multiline_adjlist_digraph(self, tmp_path): G = self.DG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph()) H2 = nx.read_multiline_adjlist(fname, 
create_using=nx.DiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_multiline_adjlist_integers(self): - (fd, fname) = tempfile.mkstemp() + def test_multiline_adjlist_integers(self, tmp_path): + fname = tmp_path / "adjlist.txt" G = nx.convert_node_labels_to_integers(self.G) nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname, nodetype=int) @@ -225,12 +203,10 @@ def test_multiline_adjlist_integers(self): assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_multiline_adjlist_multigraph(self): + def test_multiline_adjlist_multigraph(self, tmp_path): G = self.XG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = nx.read_multiline_adjlist( @@ -239,12 +215,10 @@ def test_multiline_adjlist_multigraph(self): assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_multiline_adjlist_multidigraph(self): + def test_multiline_adjlist_multidigraph(self, tmp_path): G = self.XDG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "adjlist.txt" nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist( fname, nodetype=int, create_using=nx.MultiDiGraph() @@ -255,8 +229,6 @@ def test_multiline_adjlist_multidigraph(self): assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) def test_multiline_adjlist_delimiter(self): fh = io.BytesIO() diff --git 
a/networkx/readwrite/tests/test_edgelist.py b/networkx/readwrite/tests/test_edgelist.py index 18b726f4380..29a536d53e9 100644 --- a/networkx/readwrite/tests/test_edgelist.py +++ b/networkx/readwrite/tests/test_edgelist.py @@ -2,8 +2,6 @@ Unit tests for edgelists. """ import io -import os -import tempfile import textwrap import pytest @@ -215,45 +213,39 @@ def test_write_edgelist_4(self): fh.seek(0) assert fh.read() == b"1 2 2.0\n2 3 3.0\n" - def test_unicode(self): + def test_unicode(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) - fd, fname = tempfile.mkstemp() + fname = tmp_path / "el.txt" nx.write_edgelist(G, fname) H = nx.read_edgelist(fname) assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_latin1_issue(self): + def test_latin1_issue(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) - fd, fname = tempfile.mkstemp() - pytest.raises( - UnicodeEncodeError, nx.write_edgelist, G, fname, encoding="latin-1" - ) - os.close(fd) - os.unlink(fname) + fname = tmp_path / "el.txt" + with pytest.raises(UnicodeEncodeError): + nx.write_edgelist(G, fname, encoding="latin-1") - def test_latin1(self): + def test_latin1(self, tmp_path): G = nx.Graph() name1 = "Bj" + chr(246) + "rk" name2 = chr(220) + "ber" G.add_edge(name1, "Radiohead", **{name2: 3}) - fd, fname = tempfile.mkstemp() + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname, encoding="latin-1") H = nx.read_edgelist(fname, encoding="latin-1") assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_edgelist_graph(self): + def test_edgelist_graph(self, tmp_path): G = self.G - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "el.txt" nx.write_edgelist(G, fname) H = nx.read_edgelist(fname) H2 = nx.read_edgelist(fname) @@ -261,12 +253,10 @@ def 
test_edgelist_graph(self): G.remove_node("g") # isolated nodes are not written in edgelist assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_digraph(self): + def test_edgelist_digraph(self, tmp_path): G = self.DG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "el.txt" nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, create_using=nx.DiGraph()) H2 = nx.read_edgelist(fname, create_using=nx.DiGraph()) @@ -274,41 +264,33 @@ def test_edgelist_digraph(self): G.remove_node("g") # isolated nodes are not written in edgelist assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_integers(self): + def test_edgelist_integers(self, tmp_path): G = nx.convert_node_labels_to_integers(self.G) - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "el.txt" nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, nodetype=int) # isolated nodes are not written in edgelist G.remove_nodes_from(list(nx.isolates(G))) assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_multigraph(self): + def test_edgelist_multigraph(self, tmp_path): G = self.XG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "el.txt" nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_multidigraph(self): + def test_edgelist_multidigraph(self, tmp_path): G = self.XDG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "el.txt" nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, nodetype=int, 
create_using=nx.MultiDiGraph()) H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) diff --git a/networkx/readwrite/tests/test_gml.py b/networkx/readwrite/tests/test_gml.py index 50eff5c3617..f575ad269cf 100644 --- a/networkx/readwrite/tests/test_gml.py +++ b/networkx/readwrite/tests/test_gml.py @@ -1,8 +1,6 @@ import codecs import io import math -import os -import tempfile from ast import literal_eval from contextlib import contextmanager from textwrap import dedent @@ -165,17 +163,14 @@ def test_parse_gml(self): ("Node 3", "Node 1", {"label": "Edge from node 3 to node 1"}), ] - def test_read_gml(self): - (fd, fname) = tempfile.mkstemp() - fh = open(fname, "w") - fh.write(self.simple_data) - fh.close() + def test_read_gml(self, tmp_path): + fname = tmp_path / "test.gml" + with open(fname, "w") as fh: + fh.write(self.simple_data) Gin = nx.read_gml(fname, label="label") G = nx.parse_gml(self.simple_data, label="label") assert sorted(G.nodes(data=True)) == sorted(Gin.nodes(data=True)) assert sorted(G.edges(data=True)) == sorted(Gin.edges(data=True)) - os.close(fd) - os.unlink(fname) def test_labels_are_strings(self): # GML requires labels to be strings (i.e., in quotes) @@ -235,18 +230,18 @@ def test_tuplelabels(self, stringizer): ]""" assert data == answer - def test_quotes(self): + def test_quotes(self, tmp_path): # https://github.com/networkx/networkx/issues/1061 # Encoding quotes as HTML entities. 
G = nx.path_graph(1) G.name = "path_graph(1)" attr = 'This is "quoted" and this is a copyright: ' + chr(169) G.nodes[0]["demo"] = attr - fobj = tempfile.NamedTemporaryFile() - nx.write_gml(G, fobj) - fobj.seek(0) - # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode("ascii") + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") answer = """graph [ name "path_graph(1)" node [ @@ -257,15 +252,15 @@ def test_quotes(self): ]""" assert data == answer - def test_unicode_node(self): + def test_unicode_node(self, tmp_path): node = "node" + chr(169) G = nx.Graph() G.add_node(node) - fobj = tempfile.NamedTemporaryFile() - nx.write_gml(G, fobj) - fobj.seek(0) - # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode("ascii") + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") answer = """graph [ node [ id 0 @@ -274,15 +269,15 @@ def test_unicode_node(self): ]""" assert data == answer - def test_float_label(self): + def test_float_label(self, tmp_path): node = 1.0 G = nx.Graph() G.add_node(node) - fobj = tempfile.NamedTemporaryFile() - nx.write_gml(G, fobj) - fobj.seek(0) - # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode("ascii") + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") answer = """graph [ node [ id 0 @@ -291,7 +286,7 @@ def test_float_label(self): ]""" assert data == answer - def test_special_float_label(self): + def test_special_float_label(self, tmp_path): special_floats = [float("nan"), float("+inf"), float("-inf")] try: import numpy as np @@ -307,12 +302,12 @@ def test_special_float_label(self): attrs = {edges[i]: value for i, value in enumerate(special_floats)} 
nx.set_edge_attributes(G, attrs, "edgefloat") - fobj = tempfile.NamedTemporaryFile() - nx.write_gml(G, fobj) - fobj.seek(0) - # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode("ascii") - answer = """graph [ + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") + answer = """graph [ node [ id 0 label "0" @@ -374,24 +369,24 @@ def test_special_float_label(self): edgefloat -INF ] ]""" - assert data == answer - - fobj.seek(0) - graph = nx.read_gml(fobj) - for indx, value in enumerate(special_floats): - node_value = graph.nodes[str(indx)]["nodefloat"] - if math.isnan(value): - assert math.isnan(node_value) - else: - assert node_value == value + assert data == answer + + fobj.seek(0) + graph = nx.read_gml(fobj) + for indx, value in enumerate(special_floats): + node_value = graph.nodes[str(indx)]["nodefloat"] + if math.isnan(value): + assert math.isnan(node_value) + else: + assert node_value == value - edge = edges[indx] - string_edge = (str(edge[0]), str(edge[1])) - edge_value = graph.edges[string_edge]["edgefloat"] - if math.isnan(value): - assert math.isnan(edge_value) - else: - assert edge_value == value + edge = edges[indx] + string_edge = (str(edge[0]), str(edge[1])) + edge_value = graph.edges[string_edge]["edgefloat"] + if math.isnan(value): + assert math.isnan(edge_value) + else: + assert edge_value == value def test_name(self): G = nx.parse_gml('graph [ name "x" node [ id 0 label "x" ] ]') @@ -480,13 +475,13 @@ def test_escape_unescape(self): ) assert answer == gml - def test_exceptions(self): + def test_exceptions(self, tmp_path): pytest.raises(ValueError, literal_destringizer, "(") pytest.raises(ValueError, literal_destringizer, "frozenset([1, 2, 3])") pytest.raises(ValueError, literal_destringizer, literal_destringizer) pytest.raises(ValueError, literal_stringizer, frozenset([1, 2, 3])) pytest.raises(ValueError, 
literal_stringizer, literal_stringizer) - with tempfile.TemporaryFile() as f: + with open(tmp_path / "test.gml", "w+b") as f: f.write(codecs.BOM_UTF8 + b"graph[]") f.seek(0) pytest.raises(nx.NetworkXError, nx.read_gml, f) @@ -584,7 +579,7 @@ def test_label_kwarg(self): labels = [G.nodes[n]["label"] for n in sorted(G.nodes)] assert labels == ["Node 1", "Node 2", "Node 3"] - def test_outofrange_integers(self): + def test_outofrange_integers(self, tmp_path): # GML restricts integers to 32 signed bits. # Check that we honor this restriction on export G = nx.Graph() @@ -601,19 +596,15 @@ def test_outofrange_integers(self): } G.add_node("Node", **numbers) - fd, fname = tempfile.mkstemp() - try: - nx.write_gml(G, fname) - # Check that the export wrote the nonfitting numbers as strings - G2 = nx.read_gml(fname) - for attr, value in G2.nodes["Node"].items(): - if attr == "toosmall" or attr == "toobig": - assert type(value) == str - else: - assert type(value) == int - finally: - os.close(fd) - os.unlink(fname) + fname = tmp_path / "test.gml" + nx.write_gml(G, fname) + # Check that the export wrote the nonfitting numbers as strings + G2 = nx.read_gml(fname) + for attr, value in G2.nodes["Node"].items(): + if attr == "toosmall" or attr == "toobig": + assert type(value) == str + else: + assert type(value) == int def test_multiline(self): # example from issue #6836 diff --git a/networkx/readwrite/tests/test_graph6.py b/networkx/readwrite/tests/test_graph6.py index 062a96f0b12..a80326946c6 100644 --- a/networkx/readwrite/tests/test_graph6.py +++ b/networkx/readwrite/tests/test_graph6.py @@ -1,4 +1,3 @@ -import tempfile from io import BytesIO import pytest @@ -104,8 +103,8 @@ def test_roundtrip(self): assert nodes_equal(G.nodes(), H.nodes()) assert edges_equal(G.edges(), H.edges()) - def test_write_path(self): - with tempfile.NamedTemporaryFile() as f: + def test_write_path(self, tmp_path): + with open(tmp_path / "test.g6", "w+b") as f: g6.write_graph6_file(nx.null_graph(), f) 
f.seek(0) assert f.read() == b">>graph6<>sparse6<<:?\n" - fh.close() - import os - - os.remove(fullfilename) + with open(fullfilename, mode="rb") as fh: + assert fh.read() == b">>sparse6<<:?\n" From 8682e1b47d72530b3200e2055a06d7a287ee0f7f Mon Sep 17 00:00:00 2001 From: Maximilian Seeliger Date: Tue, 16 Jan 2024 04:18:16 +0100 Subject: [PATCH 042/129] Fixed typo in tensor product documentation (Fixes #7228) (#7229) Fixed typo --- networkx/algorithms/operators/product.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py index 9a8063c91e2..3128f82b40b 100644 --- a/networkx/algorithms/operators/product.py +++ b/networkx/algorithms/operators/product.py @@ -128,7 +128,7 @@ def tensor_product(G, H): r"""Returns the tensor product of G and H. The tensor product $P$ of the graphs $G$ and $H$ has a node set that - is the tensor product of the node sets, $V(P)=V(G) \times V(H)$. + is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. $P$ has an edge $((u,v), (x,y))$ if and only if $(u,x)$ is an edge in $G$ and $(v,y)$ is an edge in $H$. From 4a0a6754a58ed46ffa0d193e2b4951f9884d206e Mon Sep 17 00:00:00 2001 From: Dishie Vinchhi <95416713+Dishie2498@users.noreply.github.com> Date: Thu, 18 Jan 2024 00:22:15 +0530 Subject: [PATCH 043/129] updated test_directed_edge_swap to fix #5814 (#6426) * rewrite test to ensure edges are swapped Co-authored-by: Dan Schult --- networkx/algorithms/swap.py | 2 ++ networkx/algorithms/tests/test_swap.py | 38 ++++++++++++++++++++------ 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py index 1b6133e4a69..6c4b698f1ad 100644 --- a/networkx/algorithms/swap.py +++ b/networkx/algorithms/swap.py @@ -57,6 +57,8 @@ def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): The graph G is modified in place. + A later swap is allowed to undo a previous swap. 
+ References ---------- .. [1] Erdős, Péter L., et al. “A Simple Havel-Hakimi Type Algorithm to Realize diff --git a/networkx/algorithms/tests/test_swap.py b/networkx/algorithms/tests/test_swap.py index 49dd5f8e8c7..378c8ecfa61 100644 --- a/networkx/algorithms/tests/test_swap.py +++ b/networkx/algorithms/tests/test_swap.py @@ -2,14 +2,36 @@ import networkx as nx - -def test_directed_edge_swap(): - graph = nx.path_graph(200, create_using=nx.DiGraph) - in_degrees = sorted((n, d) for n, d in graph.in_degree()) - out_degrees = sorted((n, d) for n, d in graph.out_degree()) - G = nx.directed_edge_swap(graph, nswap=40, max_tries=500, seed=1) - assert in_degrees == sorted((n, d) for n, d in G.in_degree()) - assert out_degrees == sorted((n, d) for n, d in G.out_degree()) +cycle = nx.cycle_graph(5, create_using=nx.DiGraph) +tree = nx.random_tree(10, create_using=nx.DiGraph) +path = nx.path_graph(5, create_using=nx.DiGraph) +binomial = nx.binomial_tree(3, create_using=nx.DiGraph) +HH = nx.directed_havel_hakimi_graph([1, 2, 1, 2, 2, 2], [3, 1, 0, 1, 2, 3]) +balanced_tree = nx.balanced_tree(2, 3, create_using=nx.DiGraph) + + +@pytest.mark.parametrize("G", [path, binomial, HH, cycle, tree, balanced_tree]) +def test_directed_edge_swap(G): + in_degree = set(G.in_degree) + out_degree = set(G.out_degree) + edges = set(G.edges) + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert in_degree == set(G.in_degree) + assert out_degree == set(G.out_degree) + assert edges != set(G.edges) + assert 3 == sum(e not in edges for e in G.edges) + + +def test_directed_edge_swap_undo_previous_swap(): + G = nx.DiGraph(nx.path_graph(4).edges) # only 1 swap possible + edges = set(G.edges) + nx.directed_edge_swap(G, nswap=2, max_tries=100) + assert edges == set(G.edges) + + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert {(0, 2), (1, 3), (2, 1)} == set(G.edges) + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert edges == set(G.edges) def 
test_edge_cases_directed_edge_swap(): From 40e84a489c9de00711b844392eb8087806d93f52 Mon Sep 17 00:00:00 2001 From: Purvi Chaurasia <97350598+PurviChaurasia@users.noreply.github.com> Date: Thu, 18 Jan 2024 00:25:34 +0530 Subject: [PATCH 044/129] Add example for cycle detection (#6560) * Fix negative edge cycle function raising exception for empty graph and added relevant test function * Unresolved change * Added example script to visualize GOT network * Added example script to visualize cycle detection * Deleted got.py * Minor tweaks to example. * Update examples/algorithms/plot_cycle_detection.py --------- Co-authored-by: Ross Barnowski Co-authored-by: Dan Schult --- examples/algorithms/plot_cycle_detection.py | 29 +++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 examples/algorithms/plot_cycle_detection.py diff --git a/examples/algorithms/plot_cycle_detection.py b/examples/algorithms/plot_cycle_detection.py new file mode 100644 index 00000000000..cd22f750736 --- /dev/null +++ b/examples/algorithms/plot_cycle_detection.py @@ -0,0 +1,29 @@ +""" +=============== +Cycle Detection +=============== + +This example demonstrates the use of ``nx.find_cycle`` to find a single, +arbitrary cycle in a graph. + +Other functions like ``nx.simple_cycles`` and ``nx.cycle_basis`` can be used to +find all cycles or a cycle basis. 
+""" +import networkx as nx +import matplotlib.pyplot as plt + +# Create a simple directed graph with a cycle +G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 2), (3, 5), (3, 2), (1, 5)]) + +# Draw the graph +pos = nx.spring_layout(G, seed=8020) +nx.draw(G, pos, with_labels=True) + +# The `orientation` parameter can be used to determine how directed edges are +# treated and the reporting of edge direction in the cycle +cycle = nx.find_cycle(G, orientation="original") +print(cycle) + +# Highlight the cycle in red +nx.draw_networkx_edges(G, pos, edgelist=cycle, edge_color="r", width=2) +plt.show() From 7fc4fa51875d9e326416f998c51f208442d55dad Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Thu, 18 Jan 2024 00:38:08 -0800 Subject: [PATCH 045/129] Bump copyright year for 2024. (#7232) --- LICENSE.txt | 2 +- README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE.txt b/LICENSE.txt index 42b6f17a65e..100b4bffb00 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -2,7 +2,7 @@ NetworkX is distributed with the 3-clause BSD license. 
:: - Copyright (C) 2004-2023, NetworkX Developers + Copyright (C) 2004-2024, NetworkX Developers Aric Hagberg Dan Schult Pieter Swart diff --git a/README.rst b/README.rst index 11bd027f974..9e4be00835b 100644 --- a/README.rst +++ b/README.rst @@ -67,7 +67,7 @@ License Released under the 3-Clause BSD license (see `LICENSE.txt`):: - Copyright (C) 2004-2023 NetworkX Developers + Copyright (C) 2004-2024 NetworkX Developers Aric Hagberg Dan Schult Pieter Swart From 3eecad33aebd83ddd0c03e69c9cb02f84d8ad451 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Sat, 20 Jan 2024 02:05:52 +0400 Subject: [PATCH 046/129] Divisive community algorithms (#5830) * Divisive community algorithms * Cleanup divisive community algos * Change number_of_partitions to number of sets, doc_string tweaks, examples, etc * Rewrite so CF Btwness is called on each component separately * importorskip numpy for edge_current_flow_betweenness * fix isort trouble * Apply suggestions from code review Co-authored-by: Ross Barnowski * FEAT: adding a PoC of creating an internal cache --------- Co-authored-by: Benjamin Edwards Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- doc/reference/algorithms/community.rst | 9 + networkx/algorithms/community/__init__.py | 1 + networkx/algorithms/community/divisive.py | 194 ++++++++++++++++++ .../community/tests/test_divisive.py | 109 ++++++++++ 4 files changed, 313 insertions(+) create mode 100644 networkx/algorithms/community/divisive.py create mode 100644 networkx/algorithms/community/tests/test_divisive.py diff --git a/doc/reference/algorithms/community.rst b/doc/reference/algorithms/community.rst index 1d107099790..b031bcb349b 100644 --- a/doc/reference/algorithms/community.rst +++ b/doc/reference/algorithms/community.rst @@ -14,6 +14,15 @@ Bipartitions kernighan_lin_bisection +Divisive Communities +--------------------- +.. automodule:: networkx.algorithms.community.divisive +.. 
autosummary:: + :toctree: generated/ + + edge_betweenness_partition + edge_current_flow_betweenness_partition + K-Clique -------- .. automodule:: networkx.algorithms.community.kclique diff --git a/networkx/algorithms/community/__init__.py b/networkx/algorithms/community/__init__.py index fa782201ddc..40549aff238 100644 --- a/networkx/algorithms/community/__init__.py +++ b/networkx/algorithms/community/__init__.py @@ -14,6 +14,7 @@ """ from networkx.algorithms.community.asyn_fluid import * from networkx.algorithms.community.centrality import * +from networkx.algorithms.community.divisive import * from networkx.algorithms.community.kclique import * from networkx.algorithms.community.kernighan_lin import * from networkx.algorithms.community.label_propagation import * diff --git a/networkx/algorithms/community/divisive.py b/networkx/algorithms/community/divisive.py new file mode 100644 index 00000000000..7dbf70ca3fd --- /dev/null +++ b/networkx/algorithms/community/divisive.py @@ -0,0 +1,194 @@ +import functools + +import networkx as nx + +__all__ = [ + "edge_betweenness_partition", + "edge_current_flow_betweenness_partition", +] + + +def edge_betweenness_partition(G, number_of_sets, *, weight=None): + """Partition created by iteratively removing the highest edge betweenness edge. + + This algorithm works by calculating the edge betweenness for all + edges and removing the edge with the highest value. It is then + determined whether the graph has been broken into at least + `number_of_sets` connected components. + If not the process is repeated. 
+ + Parameters + ---------- + G : NetworkX Graph, DiGraph or MultiGraph + Graph to be partitioned + + number_of_sets : int + Number of sets in the desired partition of the graph + + weight : key, optional, default=None + The key to use if using weights for edge betweenness calculation + + Returns + ------- + C : list of sets + Partition of the nodes of G + + Raises + ------ + NetworkXError + If number_of_sets is <= 0 or if number_of_sets > len(G) + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> part = nx.community.edge_betweenness_partition(G, 2) + >>> {0, 1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 16, 17, 19, 21} in part + True + >>> {2, 8, 9, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part + True + + See Also + -------- + edge_current_flow_betweenness_partition + + Notes + ----- + This algorithm is fairly slow, as both the calculation of connected + components and edge betweenness relies on all pairs shortest + path algorithms. They could potentially be combined to cut down + on overall computation time. + + References + ---------- + .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports + Volume 486, Issue 3-5 p. 75-174 + http://arxiv.org/abs/0906.0612 + """ + if number_of_sets <= 0: + raise nx.NetworkXError("number_of_sets must be >0") + if number_of_sets == 1: + return [set(G)] + if number_of_sets == len(G): + return [{n} for n in G] + if number_of_sets > len(G): + raise nx.NetworkXError("number_of_sets must be <= len(G)") + + H = G.copy() + partition = list(nx.connected_components(H)) + while len(partition) < number_of_sets: + ranking = nx.edge_betweenness_centrality(H, weight=weight) + edge = max(ranking, key=ranking.get) + H.remove_edge(*edge) + partition = list(nx.connected_components(H)) + return partition + + +def edge_current_flow_betweenness_partition(G, number_of_sets, *, weight=None): + """Partition created by removing the highest edge current flow betweenness edge. 
+ + This algorithm works by calculating the edge current flow + betweenness for all edges and removing the edge with the + highest value. It is then determined whether the graph has + been broken into at least `number_of_sets` connected + components. If not the process is repeated. + + Parameters + ---------- + G : NetworkX Graph, DiGraph or MultiGraph + Graph to be partitioned + + number_of_sets : int + Number of sets in the desired partition of the graph + + weight : key, optional (default=None) + The edge attribute key to use as weights for + edge current flow betweenness calculations + + Returns + ------- + C : list of sets + Partition of G + + Raises + ------ + NetworkXError + If number_of_sets is <= 0 or number_of_sets > len(G) + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> part = nx.community.edge_current_flow_betweenness_partition(G, 2) + >>> {0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21} in part + True + >>> {8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part + True + + + See Also + -------- + edge_betweenness_partition + + Notes + ----- + This algorithm is extremely slow, as the recalculation of the edge + current flow betweenness is extremely slow. + + References + ---------- + .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports + Volume 486, Issue 3-5 p. 
75-174 + http://arxiv.org/abs/0906.0612 + """ + if number_of_sets <= 0: + raise nx.NetworkXError("number_of_sets must be >0") + elif number_of_sets == 1: + return [set(G)] + elif number_of_sets == len(G): + return [{n} for n in G] + elif number_of_sets > len(G): + raise nx.NetworkXError("number_of_sets must be <= len(G)") + + rank = functools.partial( + nx.edge_current_flow_betweenness_centrality, normalized=False, weight=weight + ) + + # current flow requires a connected network so we track the components explicitly + H = G.copy() + partition = list(nx.connected_components(H)) + if len(partition) > 1: + Hcc_subgraphs = [H.subgraph(cc).copy() for cc in partition] + else: + Hcc_subgraphs = [H] + + ranking = {} + for Hcc in Hcc_subgraphs: + ranking.update(rank(Hcc)) + + while len(partition) < number_of_sets: + edge = max(ranking, key=ranking.get) + for cc, Hcc in zip(partition, Hcc_subgraphs): + if edge[0] in cc: + Hcc.remove_edge(*edge) + del ranking[edge] + splitcc_list = list(nx.connected_components(Hcc)) + if len(splitcc_list) > 1: + # there are 2 connected components. split off smaller one + cc_new = min(splitcc_list, key=len) + Hcc_new = Hcc.subgraph(cc_new).copy() + # update edge rankings for Hcc_new + newranks = rank(Hcc_new) + for e, r in newranks.items(): + ranking[e if e in ranking else e[::-1]] = r + # append new cc and Hcc to their lists. 
+ partition.append(cc_new) + Hcc_subgraphs.append(Hcc_new) + + # leave existing cc and Hcc in their lists, but shrink them + Hcc.remove_nodes_from(cc_new) + cc.difference_update(cc_new) + # update edge rankings for Hcc whether it was split or not + newranks = rank(Hcc) + for e, r in newranks.items(): + ranking[e if e in ranking else e[::-1]] = r + break + return partition diff --git a/networkx/algorithms/community/tests/test_divisive.py b/networkx/algorithms/community/tests/test_divisive.py new file mode 100644 index 00000000000..f3857613db2 --- /dev/null +++ b/networkx/algorithms/community/tests/test_divisive.py @@ -0,0 +1,109 @@ +import pytest + +import networkx as nx + + +def test_edge_betweenness_partition(): + G = nx.barbell_graph(3, 0) + C = nx.community.edge_betweenness_partition(G, 2) + answer = [{0, 1, 2}, {3, 4, 5}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + G = nx.barbell_graph(3, 1) + C = nx.community.edge_betweenness_partition(G, 3) + answer = [{0, 1, 2}, {4, 5, 6}, {3}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_betweenness_partition(G, 7) + answer = [{n} for n in G] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_betweenness_partition(G, 1) + assert C == [set(G)] + + C = nx.community.edge_betweenness_partition(G, 1, weight="weight") + assert C == [set(G)] + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, 0) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, -1) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, 10) + + +def test_edge_current_flow_betweenness_partition(): + pytest.importorskip("numpy") + + G = nx.barbell_graph(3, 0) + C = nx.community.edge_current_flow_betweenness_partition(G, 2) + answer = [{0, 1, 2}, {3, 4, 5}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + G = nx.barbell_graph(3, 1) + 
C = nx.community.edge_current_flow_betweenness_partition(G, 2) + answer = [{0, 1, 2, 3}, {4, 5, 6}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 3) + answer = [{0, 1, 2}, {4, 5, 6}, {3}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 4) + answer = [{1, 2}, {4, 5, 6}, {3}, {0}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 5) + answer = [{1, 2}, {5, 6}, {3}, {0}, {4}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 6) + answer = [{2}, {5, 6}, {3}, {0}, {4}, {1}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 7) + answer = [{n} for n in G] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 1) + assert C == [set(G)] + + C = nx.community.edge_current_flow_betweenness_partition(G, 1, weight="weight") + assert C == [set(G)] + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, 0) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, -1) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, 10) + + N = 10 + G = nx.empty_graph(N) + for i in range(2, N - 1): + C = nx.community.edge_current_flow_betweenness_partition(G, i) + assert C == [{n} for n in G] From 55ed94797df50f34597e1bde95ab7cd06e4cc2f2 Mon Sep 17 00:00:00 2001 From: Dan Schult Date: Fri, 19 Jan 2024 18:29:27 -0500 Subject: [PATCH 047/129] add seed to graph creation (#7241) * add seed to graph creation * revert binomial_tree as no seed --- networkx/algorithms/tests/test_swap.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/networkx/algorithms/tests/test_swap.py b/networkx/algorithms/tests/test_swap.py index 378c8ecfa61..c4aeb0682e0 100644 --- a/networkx/algorithms/tests/test_swap.py +++ b/networkx/algorithms/tests/test_swap.py @@ -3,7 +3,7 @@ import networkx as nx cycle = nx.cycle_graph(5, create_using=nx.DiGraph) -tree = nx.random_tree(10, create_using=nx.DiGraph) +tree = nx.random_tree(10, create_using=nx.DiGraph, seed=42) path = nx.path_graph(5, create_using=nx.DiGraph) binomial = nx.binomial_tree(3, create_using=nx.DiGraph) HH = nx.directed_havel_hakimi_graph([1, 2, 1, 2, 2, 2], [3, 1, 0, 1, 2, 3]) From 838fc93423acb7969507ac01e0ee3dad0f4153e5 Mon Sep 17 00:00:00 2001 From: Dan Schult Date: Sat, 20 Jan 2024 09:47:47 -0500 Subject: [PATCH 048/129] add seed to tests of fast_label_propatation_communities (#7242) --- .../community/tests/test_label_propagation.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/networkx/algorithms/community/tests/test_label_propagation.py b/networkx/algorithms/community/tests/test_label_propagation.py index 9a0b3d89b6e..d86b1d7d47f 100644 --- a/networkx/algorithms/community/tests/test_label_propagation.py +++ b/networkx/algorithms/community/tests/test_label_propagation.py @@ -163,7 +163,7 @@ class TestFastLabelPropagationCommunities: N = 100 # number of nodes K = 15 # average node degree - def _check_communities(self, G, truth, weight=None, seed=None): + def _check_communities(self, G, truth, weight=None, seed=42): C = nx.community.fast_label_propagation_communities(G, weight=weight, seed=seed) assert {frozenset(c) for c in C} == truth @@ -203,10 +203,9 @@ def test_disjoin_cliques(self): self._check_communities(G, truth) def test_ring_of_cliques(self): - G = nx.ring_of_cliques(self.N, self.K) - truth = { - frozenset([self.K * i + k for k in range(self.K)]) for i in range(self.N) - } + N, K = self.N, self.K + G = nx.ring_of_cliques(N, K) + truth = {frozenset([K * i + k for 
k in range(K)]) for i in range(N)} self._check_communities(G, truth) def test_larger_graph(self): @@ -238,3 +237,5 @@ def test_seed_argument(self): C = nx.community.fast_label_propagation_communities(G, seed=2023) truth = {frozenset(c) for c in C} self._check_communities(G, truth, seed=2023) + # smoke test that seed=None works + C = nx.community.fast_label_propagation_communities(G, seed=None) From d06bbe06241d6b310b840825053a2bbfa096f994 Mon Sep 17 00:00:00 2001 From: Maximilian Seeliger Date: Wed, 24 Jan 2024 00:28:03 +0100 Subject: [PATCH 049/129] Added feature modular graph product (#7227) * Implementation of modular product * Improved performance by reducing number of loops * Added doc reference and tests Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski --- doc/reference/algorithms/operators.rst | 1 + networkx/algorithms/operators/product.py | 94 +++++++++++++++++++ .../operators/tests/test_product.py | 56 +++++++++++ 3 files changed, 151 insertions(+) diff --git a/doc/reference/algorithms/operators.rst b/doc/reference/algorithms/operators.rst index 13632e270e6..c97c96c3c10 100644 --- a/doc/reference/algorithms/operators.rst +++ b/doc/reference/algorithms/operators.rst @@ -44,3 +44,4 @@ Operators tensor_product power corona_product + modular_product diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py index 3128f82b40b..ee38fd7a18c 100644 --- a/networkx/algorithms/operators/product.py +++ b/networkx/algorithms/operators/product.py @@ -14,6 +14,7 @@ "power", "rooted_product", "corona_product", + "modular_product", ] _G_H = {"G": 0, "H": 1} @@ -532,3 +533,96 @@ def corona_product(G, H): GH.add_edges_from((G_node, (G_node, H_node)) for H_node in H) return GH + + +@nx._dispatchable(graphs=_G_H) +def modular_product(G, H): + r"""Returns the Modular product of G and H. 
+ + The modular product of `G` and `H` is the graph $M = G \nabla H$, + consisting of the node set $V(M) = V(G) \times V(H)$ that is the Cartesian + product of the node sets of `G` and `H`. Further, M contains an edge ((u, v), (x, y)): + + - if u is adjacent to x in `G` and v is adjacent to y in `H`, or + - if u is not adjacent to x in `G` and v is not adjacent to y in `H`. + + More formally + :: + + E(M) = {((u, v), (x, y)) | ((u, x) in E(G) and (v, y) in E(H)) or + ((u, x) not in E(G) and (v, y) not in E(H))} + Parameters + ---------- + G, H: NetworkX graphs + The graphs to take the modular product of. + + Returns + ------- + M: NetworkX graph + The Modular product of G and H. + + Raises + ------ + NetworkXNotImplemented + If G is not a simple graph. + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> H = nx.path_graph(2) + >>> M = nx.modular_product(G, H) + >>> list(M) + [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)] + >>> print(M) + Graph with 8 nodes and 8 edges + + Notes + ----- + The *modular product* is defined in [1]_ and was first + introduced as the *weak modular product*. + + The modular product reduces the problem of counting isomorphic subgraphs + in `G` and `H` to the problem of counting cliques in M. The subgraphs of + `G` and `H` that are induced by the nodes of a clique in M are + isomorphic [2]_ [3]_. + + References + ---------- + .. [1] R. Hammack, W. Imrich, and S. Klavžar, + "Handbook of Product Graphs", CRC Press, 2011. + + .. [2] H. G. Barrow and R. M. Burstall, + "Subgraph isomorphism, matching relational structures and maximal + cliques", Information Processing Letters, vol. 4, issue 4, pp. 83-84, + 1976, https://doi.org/10.1016/0020-0190(76)90049-1. + + .. [3] V. G. Vizing, "Reduction of the problem of isomorphism and isomorphic + entrance to the task of finding the nondensity of a graph." Proc. Third + All-Union Conference on Problems of Theoretical Cybernetics. 1974. 
+ """ + if G.is_directed() or H.is_directed(): + raise nx.NetworkXNotImplemented( + "Modular product not implemented for directed graphs" + ) + if G.is_multigraph() or H.is_multigraph(): + raise nx.NetworkXNotImplemented( + "Modular product not implemented for multigraphs" + ) + + GH = _init_product_graph(G, H) + GH.add_nodes_from(_node_product(G, H)) + + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + GH.add_edge((u, x), (v, y), **_dict_product(c, d)) + GH.add_edge((v, x), (u, y), **_dict_product(c, d)) + + G = nx.complement(G) + H = nx.complement(H) + + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + GH.add_edge((u, x), (v, y), **_dict_product(c, d)) + GH.add_edge((v, x), (u, y), **_dict_product(c, d)) + + return GH diff --git a/networkx/algorithms/operators/tests/test_product.py b/networkx/algorithms/operators/tests/test_product.py index 50bc7b7e59e..2eb788bc302 100644 --- a/networkx/algorithms/operators/tests/test_product.py +++ b/networkx/algorithms/operators/tests/test_product.py @@ -433,3 +433,59 @@ def test_corona_product(): C = nx.corona_product(G, H) assert len(C) == (len(G) * len(H)) + len(G) assert C.size() == G.size() + len(G) * H.size() + len(G) * len(H) + + +def test_modular_product(): + G = nx.path_graph(3) + H = nx.path_graph(4) + M = nx.modular_product(G, H) + assert len(M) == len(G) * len(H) + + assert edges_equal( + list(M.edges()), + [ + ((0, 0), (1, 1)), + ((0, 0), (2, 2)), + ((0, 0), (2, 3)), + ((0, 1), (1, 0)), + ((0, 1), (1, 2)), + ((0, 1), (2, 3)), + ((0, 2), (1, 1)), + ((0, 2), (1, 3)), + ((0, 2), (2, 0)), + ((0, 3), (1, 2)), + ((0, 3), (2, 0)), + ((0, 3), (2, 1)), + ((1, 0), (2, 1)), + ((1, 1), (2, 0)), + ((1, 1), (2, 2)), + ((1, 2), (2, 1)), + ((1, 2), (2, 3)), + ((1, 3), (2, 2)), + ], + ) + + +def test_modular_product_raises(): + G = nx.Graph([(0, 1), (1, 2), (2, 0)]) + H = nx.Graph([(0, 1), (1, 2), (2, 0)]) + DG = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + DH = nx.DiGraph([(0, 1), (1, 
2), (2, 0)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(G, DH) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(DG, H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(DG, DH) + + MG = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)]) + MH = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(G, MH) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(MG, H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(MG, MH) + with pytest.raises(nx.NetworkXNotImplemented): + # check multigraph with no multiedges + nx.modular_product(nx.MultiGraph(G), H) From d6569adf7c224a9c85c0e4e4c92435f772f73582 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Wed, 24 Jan 2024 23:39:47 +0530 Subject: [PATCH 050/129] ENH : added `sort_neighbors` to all functions in `depth_first_search.py` (#7196) Added sort_children to all dfs funcs, added dfs tests, updated links in docs of dfs. 
Also updated the parameter description for sort_neighbors in all traversal functions that support it --------- Co-authored-by: Ross Barnowski --- .../traversal/breadth_first_search.py | 39 ++++--- .../traversal/depth_first_search.py | 109 ++++++++++++++---- .../algorithms/traversal/tests/test_dfs.py | 54 +++++++++ 3 files changed, 159 insertions(+), 43 deletions(-) diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py index 09bc695759a..a4e5c951727 100644 --- a/networkx/algorithms/traversal/breadth_first_search.py +++ b/networkx/algorithms/traversal/breadth_first_search.py @@ -1,5 +1,4 @@ """Basic algorithms for breadth-first searching the nodes of a graph.""" -import math from collections import deque import networkx as nx @@ -44,7 +43,7 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor depth_limit : int, optional(default=len(G)) Specify the maximum search depth. - sort_neighbors : Callable + sort_neighbors : Callable (default=None) .. deprecated:: 3.2 @@ -52,9 +51,9 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor version 3.4. A custom (e.g. sorted) ordering of neighbors can be specified with the `neighbors` parameter. - A function that takes the list of neighbors of a given node as input, - and returns an iterator over these neighbors but with a custom - ordering. + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Yields ------ @@ -149,9 +148,10 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. 
+ sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Yields ------ @@ -210,7 +210,7 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): else: successors = G.neighbors - if callable(sort_neighbors): + if sort_neighbors is not None: yield from generic_bfs_edges( G, source, lambda node: iter(sort_neighbors(successors(node))), depth_limit ) @@ -236,9 +236,10 @@ def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Returns ------- @@ -299,9 +300,10 @@ def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
Returns ------- @@ -364,9 +366,10 @@ def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Returns ------- diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py index 843a793aed6..8a06a96c7ac 100644 --- a/networkx/algorithms/traversal/depth_first_search.py +++ b/networkx/algorithms/traversal/depth_first_search.py @@ -15,7 +15,7 @@ @nx._dispatchable -def dfs_edges(G, source=None, depth_limit=None): +def dfs_edges(G, source=None, depth_limit=None, *, sort_neighbors=None): """Iterate over edges in a depth-first-search (DFS). Perform a depth-first-search over the nodes of `G` and yield @@ -33,6 +33,11 @@ def dfs_edges(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
+ Yields ------ edge: 2-tuple of nodes @@ -78,12 +83,18 @@ def dfs_edges(G, source=None, depth_limit=None): if depth_limit is None: depth_limit = len(G) + get_children = ( + G.neighbors + if sort_neighbors is None + else lambda n: iter(sort_neighbors(G.neighbors(n))) + ) + visited = set() for start in nodes: if start in visited: continue visited.add(start) - stack = [(start, iter(G[start]))] + stack = [(start, get_children(start))] depth_now = 1 while stack: parent, children = stack[-1] @@ -92,7 +103,7 @@ def dfs_edges(G, source=None, depth_limit=None): yield parent, child visited.add(child) if depth_now < depth_limit: - stack.append((child, iter(G[child]))) + stack.append((child, get_children(child))) depth_now += 1 break else: @@ -101,7 +112,7 @@ def dfs_edges(G, source=None, depth_limit=None): @nx._dispatchable -def dfs_tree(G, source=None, depth_limit=None): +def dfs_tree(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns oriented tree constructed from a depth-first-search from source. Parameters @@ -114,6 +125,11 @@ def dfs_tree(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
+ Returns ------- T : NetworkX DiGraph @@ -134,20 +150,20 @@ def dfs_tree(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ T = nx.DiGraph() if source is None: T.add_nodes_from(G) else: T.add_node(source) - T.add_edges_from(dfs_edges(G, source, depth_limit)) + T.add_edges_from(dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors)) return T @nx._dispatchable -def dfs_predecessors(G, source=None, depth_limit=None): +def dfs_predecessors(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns dictionary of predecessors in depth-first-search from source. Parameters @@ -163,6 +179,11 @@ def dfs_predecessors(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- pred: dict @@ -194,14 +215,17 @@ def dfs_predecessors(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ - return {t: s for s, t in dfs_edges(G, source, depth_limit)} + return { + t: s + for s, t in dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors) + } @nx._dispatchable -def dfs_successors(G, source=None, depth_limit=None): +def dfs_successors(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns dictionary of successors in depth-first-search from source. 
Parameters @@ -217,6 +241,11 @@ def dfs_successors(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- succ: dict @@ -248,17 +277,22 @@ def dfs_successors(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ d = defaultdict(list) - for s, t in dfs_edges(G, source=source, depth_limit=depth_limit): + for s, t in dfs_edges( + G, + source=source, + depth_limit=depth_limit, + sort_neighbors=sort_neighbors, + ): d[s].append(t) return dict(d) @nx._dispatchable -def dfs_postorder_nodes(G, source=None, depth_limit=None): +def dfs_postorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None): """Generate nodes in a depth-first-search post-ordering starting at source. Parameters @@ -271,6 +305,11 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
+ Returns ------- nodes: generator @@ -302,15 +341,17 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): dfs_edges dfs_preorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ - edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) + edges = nx.dfs_labeled_edges( + G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ) return (v for u, v, d in edges if d == "reverse") @nx._dispatchable -def dfs_preorder_nodes(G, source=None, depth_limit=None): +def dfs_preorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None): """Generate nodes in a depth-first-search pre-ordering starting at source. Parameters @@ -324,6 +365,11 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- nodes: generator @@ -355,14 +401,16 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): dfs_edges dfs_postorder_nodes dfs_labeled_edges - bfs_edges + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges` """ - edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) + edges = nx.dfs_labeled_edges( + G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ) return (v for u, v, d in edges if d == "forward") @nx._dispatchable -def dfs_labeled_edges(G, source=None, depth_limit=None): +def dfs_labeled_edges(G, source=None, depth_limit=None, *, sort_neighbors=None): """Iterate over edges in a depth-first-search (DFS) labeled by type. 
Parameters @@ -376,6 +424,11 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- edges: generator @@ -439,13 +492,19 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): if depth_limit is None: depth_limit = len(G) + get_children = ( + G.neighbors + if sort_neighbors is None + else lambda n: iter(sort_neighbors(G.neighbors(n))) + ) + visited = set() for start in nodes: if start in visited: continue yield start, start, "forward" visited.add(start) - stack = [(start, iter(G[start]))] + stack = [(start, get_children(start))] depth_now = 1 while stack: parent, children = stack[-1] @@ -456,7 +515,7 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): yield parent, child, "forward" visited.add(child) if depth_now < depth_limit: - stack.append((child, iter(G[child]))) + stack.append((child, iter(get_children(child)))) depth_now += 1 break else: diff --git a/networkx/algorithms/traversal/tests/test_dfs.py b/networkx/algorithms/traversal/tests/test_dfs.py index 0eb698b0f2d..e43d7d61629 100644 --- a/networkx/algorithms/traversal/tests/test_dfs.py +++ b/networkx/algorithms/traversal/tests/test_dfs.py @@ -55,6 +55,14 @@ def test_dfs_edges(self): edges = nx.dfs_edges(self.D) assert list(edges) == [(0, 1), (2, 3)] + def test_dfs_edges_sorting(self): + G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + edges_asc = nx.dfs_edges(G, source=0, sort_neighbors=sorted) + sorted_desc = lambda x: sorted(x, reverse=True) + edges_desc = nx.dfs_edges(G, source=0, sort_neighbors=sorted_desc) + assert list(edges_asc) == [(0, 1), (1, 2), (2, 4), (1, 3)] + assert list(edges_desc) == [(0, 4), (4, 2), (2, 1), (1, 3)] + def 
test_dfs_labeled_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=0)) forward = [(u, v) for (u, v, d) in edges if d == "forward"] @@ -80,6 +88,52 @@ def test_dfs_labeled_edges(self): (0, 0, "reverse"), ] + def test_dfs_labeled_edges_sorting(self): + G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + edges_asc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted) + sorted_desc = lambda x: sorted(x, reverse=True) + edges_desc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted_desc) + assert list(edges_asc) == [ + (0, 0, "forward"), + (0, 1, "forward"), + (1, 0, "nontree"), + (1, 2, "forward"), + (2, 1, "nontree"), + (2, 4, "forward"), + (4, 0, "nontree"), + (4, 2, "nontree"), + (2, 4, "reverse"), + (1, 2, "reverse"), + (1, 3, "forward"), + (3, 0, "nontree"), + (3, 1, "nontree"), + (1, 3, "reverse"), + (0, 1, "reverse"), + (0, 3, "nontree"), + (0, 4, "nontree"), + (0, 0, "reverse"), + ] + assert list(edges_desc) == [ + (0, 0, "forward"), + (0, 4, "forward"), + (4, 2, "forward"), + (2, 4, "nontree"), + (2, 1, "forward"), + (1, 3, "forward"), + (3, 1, "nontree"), + (3, 0, "nontree"), + (1, 3, "reverse"), + (1, 2, "nontree"), + (1, 0, "nontree"), + (2, 1, "reverse"), + (4, 2, "reverse"), + (4, 0, "nontree"), + (0, 4, "reverse"), + (0, 3, "nontree"), + (0, 1, "nontree"), + (0, 0, "reverse"), + ] + def test_dfs_labeled_disconnected_edges(self): edges = list(nx.dfs_labeled_edges(self.D)) forward = [(u, v) for (u, v, d) in edges if d == "forward"] From 2da36864c5899cbc55fc98c96635390dd96d5f88 Mon Sep 17 00:00:00 2001 From: William Zijie Zhang <89562186+Transurgeon@users.noreply.github.com> Date: Mon, 29 Jan 2024 13:58:12 -0500 Subject: [PATCH 051/129] Add Kneser graph creation function (#7146) Adds the kneser_graph graph generation function. 
Co-authored-by: Transurgeon Co-authored-by: Ross Barnowski --- doc/reference/generators.rst | 1 + networkx/generators/classic.py | 48 +++++++++++++++++++++++ networkx/generators/tests/test_classic.py | 16 ++++++++ 3 files changed, 65 insertions(+) diff --git a/doc/reference/generators.rst b/doc/reference/generators.rst index 14fbb594847..d3b980fec92 100644 --- a/doc/reference/generators.rst +++ b/doc/reference/generators.rst @@ -34,6 +34,7 @@ Classic dorogovtsev_goltsev_mendes_graph empty_graph full_rary_tree + kneser_graph ladder_graph lollipop_graph null_graph diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index e220f82d8fd..14ed49ecbdb 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -30,6 +30,7 @@ "dorogovtsev_goltsev_mendes_graph", "empty_graph", "full_rary_tree", + "kneser_graph", "ladder_graph", "lollipop_graph", "null_graph", @@ -103,6 +104,53 @@ def full_rary_tree(r, n, create_using=None): return G +@nx._dispatchable(graphs=None) +def kneser_graph(n, k): + """Returns the Kneser Graph with parameters `n` and `k`. + + The Kneser Graph has nodes that are k-tuples (subsets) of the integers + between 0 and ``n-1``. Nodes are adjacent if their corresponding sets are disjoint. + + Parameters + ---------- + n: int + Number of integers from which to make node subsets. + Subsets are drawn from ``set(range(n))``. + k: int + Size of the subsets. 
+ + Returns + ------- + G : NetworkX Graph + + Examples + -------- + >>> G = nx.kneser_graph(5, 2) + >>> G.number_of_nodes() + 10 + >>> G.number_of_edges() + 15 + >>> nx.is_isomorphic(G, nx.petersen_graph()) + True + """ + if n <= 0: + raise NetworkXError("n should be greater than zero") + if k <= 0 or k > n: + raise NetworkXError("k should be greater than zero and smaller than n") + + G = nx.Graph() + # Create all k-subsets of [0, 1, ..., n-1] + subsets = list(itertools.combinations(range(n), k)) + + if 2 * k > n: + G.add_nodes_from(subsets) + + universe = set(range(n)) + comb = itertools.combinations # only to make it all fit on one line + G.add_edges_from((s, t) for s in subsets for t in comb(universe - set(s), k)) + return G + + @nx._dispatchable(graphs=None) def balanced_tree(r, h, create_using=None): """Returns the perfectly balanced `r`-ary tree of height `h`. diff --git a/networkx/generators/tests/test_classic.py b/networkx/generators/tests/test_classic.py index 5e24c7779e2..cf03d3ea707 100644 --- a/networkx/generators/tests/test_classic.py +++ b/networkx/generators/tests/test_classic.py @@ -617,3 +617,19 @@ def test_complete_multipartite_graph(self): assert G.nodes[u] != G.nodes[v] with pytest.raises(nx.NetworkXError, match="Negative number of nodes"): nx.complete_multipartite_graph(2, -3, 4) + + def test_kneser_graph(self): + # the petersen graph is a special case of the kneser graph when n=5 and k=2 + assert is_isomorphic(nx.kneser_graph(5, 2), nx.petersen_graph()) + + # when k is 1, the kneser graph returns a complete graph with n vertices + for i in range(1, 7): + assert is_isomorphic(nx.kneser_graph(i, 1), nx.complete_graph(i)) + + # the kneser graph of n and n-1 is the empty graph with n vertices + for j in range(3, 7): + assert is_isomorphic(nx.kneser_graph(j, j - 1), nx.empty_graph(j)) + + # in general the number of edges of the kneser graph is equal to + # (n choose k) times (n-k choose k) divided by 2 + assert 
nx.number_of_edges(nx.kneser_graph(8, 3)) == 280 From 30ac7956569cb0c4f0787600f8537425d05c5dca Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Mon, 29 Jan 2024 16:45:41 -0800 Subject: [PATCH 052/129] Update general_k_edge_subgraphs docstring. (#7254) --- .../connectivity/edge_kcomponents.py | 25 ++++++++++++------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py index d2ed1ac5c7d..9dcdf71a2e5 100644 --- a/networkx/algorithms/connectivity/edge_kcomponents.py +++ b/networkx/algorithms/connectivity/edge_kcomponents.py @@ -505,17 +505,24 @@ def _high_degree_components(G, k): @nx._dispatchable def general_k_edge_subgraphs(G, k): - """General algorithm to find all maximal k-edge-connected subgraphs in G. + """General algorithm to find all maximal k-edge-connected subgraphs in `G`. - Returns - ------- - k_edge_subgraphs : a generator of nx.Graphs that are k-edge-subgraphs - Each k-edge-subgraph is a maximal set of nodes that defines a subgraph - of G that is k-edge-connected. + Parameters + ---------- + G : nx.Graph + Graph in which all maximal k-edge-connected subgraphs will be found. + + k : int + + Yields + ------ + k_edge_subgraphs : Graph instances that are k-edge-subgraphs + Each k-edge-subgraph contains a maximal set of nodes that defines a + subgraph of `G` that is k-edge-connected. Notes ----- - Implementation of the basic algorithm from _[1]. The basic idea is to find + Implementation of the basic algorithm from [1]_. The basic idea is to find a global minimum cut of the graph. If the cut value is at least k, then the graph is a k-edge-connected subgraph and can be added to the results. Otherwise, the cut is used to split the graph in two and the procedure is @@ -524,7 +531,7 @@ def general_k_edge_subgraphs(G, k): a single node or a subgraph of G that is k-edge-connected. 
This implementation contains optimizations for reducing the number of calls - to max-flow, but there are other optimizations in _[1] that could be + to max-flow, but there are other optimizations in [1]_ that could be implemented. References @@ -547,7 +554,7 @@ def general_k_edge_subgraphs(G, k): ... (14, 101, 24), ... ] >>> G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) - >>> sorted(map(len, k_edge_subgraphs(G, k=3))) + >>> sorted(len(k_sg) for k_sg in k_edge_subgraphs(G, k=3)) [1, 1, 1, 4, 4] """ if k < 1: From f488d0862a33b921f000b501c7c5dec10ea2474e Mon Sep 17 00:00:00 2001 From: Vanshika Mishra <74042272+vanshika230@users.noreply.github.com> Date: Wed, 31 Jan 2024 22:38:22 +0530 Subject: [PATCH 053/129] Improving test coverage for Small.py (#7260) * Improving test coverage for Small.py * Changes required Co-authored-by: Ross Barnowski --------- Co-authored-by: Ross Barnowski --- networkx/generators/tests/test_small.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/networkx/generators/tests/test_small.py b/networkx/generators/tests/test_small.py index 836cbce443c..355d6d36af5 100644 --- a/networkx/generators/tests/test_small.py +++ b/networkx/generators/tests/test_small.py @@ -34,6 +34,9 @@ def test__LCF_graph(self): utility_graph = nx.complete_bipartite_graph(3, 3) assert is_isomorphic(G, utility_graph) + with pytest.raises(nx.NetworkXError, match="Directed Graph not supported"): + G = nx.LCF_graph(6, [3, -3], 3, create_using=nx.DiGraph) + def test_properties_named_small_graphs(self): G = nx.bull_graph() assert sorted(G) == list(range(5)) From 215470ae23a3f39b2c73da307260819921635921 Mon Sep 17 00:00:00 2001 From: Nihal John George <31016048+nihalgeorge01@users.noreply.github.com> Date: Wed, 31 Jan 2024 12:48:56 -0500 Subject: [PATCH 054/129] Fix rich_club_coefficient() for small graphs (#7212) * Improves nx.degree_histogram for empty graph corner case: return empty list instead of raising exception * Fix rich_club_coefficient for graphs 
with fewer than four nodes, where normalization is not possible --------- Co-authored-by: Ross Barnowski --- networkx/algorithms/richclub.py | 17 +++++++ networkx/algorithms/tests/test_richclub.py | 52 ++++++++++++++++++++++ networkx/classes/function.py | 2 +- networkx/classes/tests/test_function.py | 5 +++ 4 files changed, 75 insertions(+), 1 deletion(-) diff --git a/networkx/algorithms/richclub.py b/networkx/algorithms/richclub.py index f787f4254bf..445b27d1425 100644 --- a/networkx/algorithms/richclub.py +++ b/networkx/algorithms/richclub.py @@ -44,6 +44,12 @@ def rich_club_coefficient(G, normalized=True, Q=100, seed=None): rc : dictionary A dictionary, keyed by degree, with rich-club coefficient values. + Raises + ------ + NetworkXError + If `G` has fewer than four nodes and ``normalized=True``. + A randomly sampled graph for normalization cannot be generated in this case. + Examples -------- >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) @@ -57,6 +63,14 @@ def rich_club_coefficient(G, normalized=True, Q=100, seed=None): algorithm ignores any edge weights and is not defined for directed graphs or graphs with parallel edges or self loops. + Normalization is done by computing the rich club coefficient for a randomly + sampled graph with the same degree distribution as `G` by + repeatedly swapping the endpoints of existing edges. For graphs with fewer than 4 + nodes, it is not possible to generate a random graph with a prescribed + degree distribution, as the degree distribution fully determines the graph + (hence making the coefficients trivially normalized to 1). + This function raises an exception in this case. + Estimates for appropriate values of `Q` are found in [2]_. References @@ -108,6 +122,9 @@ def _compute_rc(G): # side of the list, which would have a linear time cost. 
edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True) ek = G.number_of_edges() + if ek == 0: + return {} + k1, k2 = edge_degrees.pop() rc = {} for d, nk in enumerate(nks): diff --git a/networkx/algorithms/tests/test_richclub.py b/networkx/algorithms/tests/test_richclub.py index 5638ddbf007..8d83abaea18 100644 --- a/networkx/algorithms/tests/test_richclub.py +++ b/networkx/algorithms/tests/test_richclub.py @@ -91,6 +91,58 @@ def test_rich_club_selfloop(): nx.rich_club_coefficient(G) +def test_rich_club_leq_3_nodes_unnormalized(): + # edgeless graphs upto 3 nodes + G = nx.Graph() + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {} + + for i in range(3): + G.add_node(i) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {} + + # 2 nodes, single edge + G = nx.Graph() + G.add_edge(0, 1) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1} + + # 3 nodes, single edge + G = nx.Graph() + G.add_nodes_from([0, 1, 2]) + G.add_edge(0, 1) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1} + + # 3 nodes, 2 edges + G.add_edge(1, 2) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 2 / 3} + + # 3 nodes, 3 edges + G.add_edge(0, 2) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1, 1: 1} + + +def test_rich_club_leq_3_nodes_normalized(): + G = nx.Graph() + with pytest.raises( + nx.exception.NetworkXError, + match="Graph has fewer than four nodes", + ): + rc = nx.rich_club_coefficient(G, normalized=True) + + for i in range(3): + G.add_node(i) + with pytest.raises( + nx.exception.NetworkXError, + match="Graph has fewer than four nodes", + ): + rc = nx.rich_club_coefficient(G, normalized=True) + + # def test_richclub2_normalized(): # T = nx.balanced_tree(2,10) # rcNorm = nx.richclub.rich_club_coefficient(T,Q=2) diff --git a/networkx/classes/function.py b/networkx/classes/function.py index 2746d56d715..f6bb9a476c2 100644 --- 
a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -156,7 +156,7 @@ def degree_histogram(G): (Order(number_of_edges)) """ counts = Counter(d for n, d in G.degree()) - return [counts.get(i, 0) for i in range(max(counts) + 1)] + return [counts.get(i, 0) for i in range(max(counts) + 1 if counts else 0)] def is_directed(G): diff --git a/networkx/classes/tests/test_function.py b/networkx/classes/tests/test_function.py index 61b73c2d256..e5596f4fb0c 100644 --- a/networkx/classes/tests/test_function.py +++ b/networkx/classes/tests/test_function.py @@ -6,6 +6,11 @@ from networkx.utils import edges_equal, nodes_equal +def test_degree_histogram_empty(): + G = nx.Graph() + assert nx.degree_histogram(G) == [] + + class TestFunction: def setup_method(self): self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test") From 3bdf3bbf2f34d6cbcac9261ceddb53d0858fe070 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Wed, 31 Jan 2024 23:36:09 +0530 Subject: [PATCH 055/129] Test for symmetric edge flow betweenness partition (#7251) Add more valid partitions to test cases --- .../community/tests/test_divisive.py | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/networkx/algorithms/community/tests/test_divisive.py b/networkx/algorithms/community/tests/test_divisive.py index f3857613db2..874e8c1677b 100644 --- a/networkx/algorithms/community/tests/test_divisive.py +++ b/networkx/algorithms/community/tests/test_divisive.py @@ -52,10 +52,9 @@ def test_edge_current_flow_betweenness_partition(): G = nx.barbell_graph(3, 1) C = nx.community.edge_current_flow_betweenness_partition(G, 2) - answer = [{0, 1, 2, 3}, {4, 5, 6}] - assert len(C) == len(answer) - for s in answer: - assert s in C + answers = [[{0, 1, 2, 3}, {4, 5, 6}], [{0, 1, 2}, {3, 4, 5, 6}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) C = 
nx.community.edge_current_flow_betweenness_partition(G, 3) answer = [{0, 1, 2}, {4, 5, 6}, {3}] @@ -64,10 +63,9 @@ def test_edge_current_flow_betweenness_partition(): assert s in C C = nx.community.edge_current_flow_betweenness_partition(G, 4) - answer = [{1, 2}, {4, 5, 6}, {3}, {0}] - assert len(C) == len(answer) - for s in answer: - assert s in C + answers = [[{1, 2}, {4, 5, 6}, {3}, {0}], [{0, 1, 2}, {5, 6}, {3}, {4}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) C = nx.community.edge_current_flow_betweenness_partition(G, 5) answer = [{1, 2}, {5, 6}, {3}, {0}, {4}] @@ -76,10 +74,9 @@ def test_edge_current_flow_betweenness_partition(): assert s in C C = nx.community.edge_current_flow_betweenness_partition(G, 6) - answer = [{2}, {5, 6}, {3}, {0}, {4}, {1}] - assert len(C) == len(answer) - for s in answer: - assert s in C + answers = [[{2}, {5, 6}, {3}, {0}, {4}, {1}], [{1, 2}, {6}, {3}, {0}, {4}, {5}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) C = nx.community.edge_current_flow_betweenness_partition(G, 7) answer = [{n} for n in G] From 7c0d8812d87c851bc646c1f2739a223885a39b6a Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Thu, 1 Feb 2024 23:32:40 +0530 Subject: [PATCH 056/129] MAINT : added `seed` to `gnm_random_graph` in `community/tests/test_label_propagation.py` (#7264) added seed to tests --- networkx/algorithms/community/tests/test_label_propagation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/networkx/algorithms/community/tests/test_label_propagation.py b/networkx/algorithms/community/tests/test_label_propagation.py index d86b1d7d47f..4be72dbf272 100644 --- a/networkx/algorithms/community/tests/test_label_propagation.py +++ b/networkx/algorithms/community/tests/test_label_propagation.py @@ -193,7 +193,7 @@ def test_bipartite_graph(self): 
self._check_communities(G, truth) def test_random_graph(self): - G = nx.gnm_random_graph(self.N, self.N * self.K // 2) + G = nx.gnm_random_graph(self.N, self.N * self.K // 2, seed=42) truth = {frozenset(G)} self._check_communities(G, truth) @@ -209,7 +209,7 @@ def test_ring_of_cliques(self): self._check_communities(G, truth) def test_larger_graph(self): - G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K) + G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K, seed=42) nx.community.fast_label_propagation_communities(G) def test_graph_type(self): From 42666fa19e0ac2a2ba8e074d06c08f971a5d160e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 16:44:56 -0800 Subject: [PATCH 057/129] Bump scientific-python/upload-nightly-action from 0.2.0 to 0.3.0 (#7266) Bumps [scientific-python/upload-nightly-action](https://github.com/scientific-python/upload-nightly-action) from 0.2.0 to 0.3.0. - [Release notes](https://github.com/scientific-python/upload-nightly-action/releases) - [Commits](https://github.com/scientific-python/upload-nightly-action/compare/5fb764c5bce1ac2297084c0f7161b1919f17c74f...6e9304f7a3a5501c6f98351537493ec898728299) --- updated-dependencies: - dependency-name: scientific-python/upload-nightly-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/nightly.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index b274f5c7f21..a8bc0c0575f 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -27,7 +27,7 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/networkx-*.whl - name: Upload nighlty wheel - uses: scientific-python/upload-nightly-action@5fb764c5bce1ac2297084c0f7161b1919f17c74f # 0.2.0 + uses: scientific-python/upload-nightly-action@6e9304f7a3a5501c6f98351537493ec898728299 # 0.3.0 with: anaconda_nightly_upload_token: ${{ secrets.ANACONDA_NIGHTLY }} artifacts_path: dist/ From 72d1f680f60213102df1ab07c5eef82605944594 Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Fri, 2 Feb 2024 14:04:20 +0530 Subject: [PATCH 058/129] ENH: Speed up common/non_neighbors by using _adj dict operations (#7244) * ENH: Speed up common_neighbors by using _adj dict operations * Speed up non_neighbors * need for speed * Update networkx/classes/function.py Co-authored-by: Dan Schult * Update networkx/algorithms/link_prediction.py Co-authored-by: Dan Schult * Add benchmarks for non_neighbors. * Add benchmarks for common_neighbors. 
--------- Co-authored-by: Dan Schult Co-authored-by: Ross Barnowski Co-authored-by: Ross Barnowski --- benchmarks/benchmarks/benchmark_neighbors.py | 51 ++++++++++++++++++ networkx/algorithms/link_prediction.py | 13 +++-- networkx/classes/function.py | 57 ++++++++++---------- networkx/classes/tests/test_function.py | 8 +-- 4 files changed, 91 insertions(+), 38 deletions(-) create mode 100644 benchmarks/benchmarks/benchmark_neighbors.py diff --git a/benchmarks/benchmarks/benchmark_neighbors.py b/benchmarks/benchmarks/benchmark_neighbors.py new file mode 100644 index 00000000000..e821f3cad95 --- /dev/null +++ b/benchmarks/benchmarks/benchmark_neighbors.py @@ -0,0 +1,51 @@ +import networkx as nx + + +# NOTE: explicit set construction in benchmarks is required for meaningful +# comparisons due to change in return type from generator -> set. See gh-7244. +class NonNeighbors: + param_names = ["num_nodes"] + params = [10, 100, 1000] + + def setup(self, num_nodes): + self.star_graph = nx.star_graph(num_nodes) + self.complete_graph = nx.complete_graph(num_nodes) + self.path_graph = nx.path_graph(num_nodes) + + def time_star_center(self, num_nodes): + set(nx.non_neighbors(self.star_graph, 0)) + + def time_star_rim(self, num_nodes): + set(nx.non_neighbors(self.star_graph, 5)) + + def time_complete(self, num_nodes): + set(nx.non_neighbors(self.complete_graph, 0)) + + def time_path_first(self, num_nodes): + set(nx.non_neighbors(self.path_graph, 0)) + + def time_path_last(self, num_nodes): + set(nx.non_neighbors(self.path_graph, num_nodes - 1)) + + def time_path_center(self, num_nodes): + set(nx.non_neighbors(self.path_graph, num_nodes // 2)) + + +# NOTE: explicit set construction in benchmarks is required for meaningful +# comparisons due to change in return type from generator -> set. See gh-7244. 
+class CommonNeighbors: + param_names = ["num_nodes"] + params = [10, 100, 1000] + + def setup(self, num_nodes): + self.star_graph = nx.star_graph(num_nodes) + self.complete_graph = nx.complete_graph(num_nodes) + + def time_star_center_rim(self, num_nodes): + set(nx.common_neighbors(self.star_graph, 0, num_nodes // 2)) + + def time_star_rim_rim(self, num_nodes): + set(nx.common_neighbors(self.star_graph, 4, 5)) + + def time_complete(self, num_nodes): + set(nx.common_neighbors(self.complete_graph, 0, num_nodes // 2)) diff --git a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py index 56f7bc5c1ba..1fb24243a16 100644 --- a/networkx/algorithms/link_prediction.py +++ b/networkx/algorithms/link_prediction.py @@ -169,7 +169,7 @@ def predict(u, v): union_size = len(set(G[u]) | set(G[v])) if union_size == 0: return 0 - return len(list(nx.common_neighbors(G, u, v))) / union_size + return len(nx.common_neighbors(G, u, v)) / union_size return _apply_prediction(G, predict, ebunch) @@ -329,7 +329,7 @@ def predict(u, v): if u == v: raise nx.NetworkXAlgorithmError("Self loops are not supported") - return sum(1 for _ in nx.common_neighbors(G, u, v)) + return len(nx.common_neighbors(G, u, v)) else: spl = dict(nx.shortest_path_length(G)) @@ -340,9 +340,8 @@ def predict(u, v): raise nx.NetworkXAlgorithmError("Self loops are not supported") path_len = spl[u].get(v, inf) - return alpha * sum(1 for _ in nx.common_neighbors(G, u, v)) + ( - 1 - alpha - ) * (G.number_of_nodes() / path_len) + n_nbrs = len(nx.common_neighbors(G, u, v)) + return alpha * n_nbrs + (1 - alpha) * len(G) / path_len return _apply_prediction(G, predict, ebunch) @@ -486,7 +485,7 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): def predict(u, v): Cu = _community(G, u, community) Cv = _community(G, v, community) - cnbors = list(nx.common_neighbors(G, u, v)) + cnbors = nx.common_neighbors(G, u, v) neighbors = ( sum(_community(G, w, community) == Cu for w in cnbors) if 
Cu == Cv else 0 ) @@ -670,7 +669,7 @@ def predict(u, v): Cv = _community(G, v, community) if Cu != Cv: return 0 - cnbors = set(nx.common_neighbors(G, u, v)) + cnbors = nx.common_neighbors(G, u, v) within = {w for w in cnbors if _community(G, w, community) == Cu} inter = cnbors - within return len(within) / (len(inter) + delta) diff --git a/networkx/classes/function.py b/networkx/classes/function.py index f6bb9a476c2..726b3e23b26 100644 --- a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -809,13 +809,13 @@ def set_edge_attributes(G, values, name=None): if G.is_multigraph(): for (u, v, key), value in values.items(): try: - G[u][v][key][name] = value + G._adj[u][v][key][name] = value except KeyError: pass else: for (u, v), value in values.items(): try: - G[u][v][name] = value + G._adj[u][v][name] = value except KeyError: pass except AttributeError: @@ -827,13 +827,13 @@ def set_edge_attributes(G, values, name=None): if G.is_multigraph(): for (u, v, key), d in values.items(): try: - G[u][v][key].update(d) + G._adj[u][v][key].update(d) except KeyError: pass else: for (u, v), d in values.items(): try: - G[u][v].update(d) + G._adj[u][v].update(d) except KeyError: pass @@ -918,11 +918,10 @@ def non_neighbors(graph, node): Returns ------- - non_neighbors : iterator - Iterator of nodes in the graph that are not neighbors of the node. + non_neighbors : set + Set of nodes in the graph that are not neighbors of the node. """ - nbors = set(neighbors(graph, node)) | {node} - return (nnode for nnode in graph if nnode not in nbors) + return graph._adj.keys() - graph._adj[node].keys() - {node} def non_edges(graph): @@ -964,8 +963,8 @@ def common_neighbors(G, u, v): Returns ------- - cnbors : iterator - Iterator of common neighbors of u and v in the graph. + cnbors : set + Set of common neighbors of u and v in the graph. 
Raises ------ @@ -983,9 +982,7 @@ def common_neighbors(G, u, v): if v not in G: raise nx.NetworkXError("v is not in the graph.") - # Return a generator explicitly instead of yielding so that the above - # checks are executed eagerly. - return (w for w in G[u] if w in G[v] and w not in (u, v)) + return G._adj[u].keys() & G._adj[v].keys() - {u, v} def is_weighted(G, edge=None, weight="weight"): @@ -1114,7 +1111,7 @@ def is_empty(G): is the number of nodes in the graph. """ - return not any(G.adj.values()) + return not any(G._adj.values()) def nodes_with_selfloops(G): @@ -1141,7 +1138,7 @@ def nodes_with_selfloops(G): [1] """ - return (n for n, nbrs in G.adj.items() if n in nbrs) + return (n for n, nbrs in G._adj.items() if n in nbrs) def selfloop_edges(G, data=False, keys=False, default=None): @@ -1191,56 +1188,59 @@ def selfloop_edges(G, data=False, keys=False, default=None): if keys is True: return ( (n, n, k, d) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for k, d in nbrs[n].items() ) else: return ( (n, n, d) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for d in nbrs[n].values() ) else: - return ((n, n, nbrs[n]) for n, nbrs in G.adj.items() if n in nbrs) + return ((n, n, nbrs[n]) for n, nbrs in G._adj.items() if n in nbrs) elif data is not False: if G.is_multigraph(): if keys is True: return ( (n, n, k, d.get(data, default)) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for k, d in nbrs[n].items() ) else: return ( (n, n, d.get(data, default)) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for d in nbrs[n].values() ) else: return ( (n, n, nbrs[n].get(data, default)) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs ) else: if G.is_multigraph(): if keys is True: return ( - (n, n, k) for n, nbrs in G.adj.items() if n in nbrs for k in nbrs[n] + (n, n, k) + for n, nbrs in G._adj.items() + if n in nbrs + for k in nbrs[n] ) 
else: return ( (n, n) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for i in range(len(nbrs[n])) # for easy edge removal (#4068) ) else: - return ((n, n) for n, nbrs in G.adj.items() if n in nbrs) + return ((n, n) for n, nbrs in G._adj.items() if n in nbrs) def number_of_selfloops(G): @@ -1288,7 +1288,10 @@ def is_path(G, path): True if `path` is a valid path in `G` """ - return all((node in G and nbr in G[node]) for node, nbr in nx.utils.pairwise(path)) + try: + return all(nbr in G._adj[node] for node, nbr in nx.utils.pairwise(path)) + except (KeyError, TypeError): + return False def path_weight(G, path, weight): @@ -1323,7 +1326,7 @@ def path_weight(G, path, weight): raise nx.NetworkXNoPath("path does not exist") for node, nbr in nx.utils.pairwise(path): if multigraph: - cost += min(v[weight] for v in G[node][nbr].values()) + cost += min(v[weight] for v in G._adj[node][nbr].values()) else: - cost += G[node][nbr][weight] + cost += G._adj[node][nbr][weight] return cost diff --git a/networkx/classes/tests/test_function.py b/networkx/classes/tests/test_function.py index e5596f4fb0c..ee4cca8372c 100644 --- a/networkx/classes/tests/test_function.py +++ b/networkx/classes/tests/test_function.py @@ -302,13 +302,13 @@ def test_neighbors_complete_graph(self): def test_non_neighbors(self): graph = nx.complete_graph(100) pop = random.sample(list(graph), 1) - nbors = list(nx.non_neighbors(graph, pop[0])) + nbors = nx.non_neighbors(graph, pop[0]) # should be all the other vertices in the graph assert len(nbors) == 0 graph = nx.path_graph(100) node = random.sample(list(graph), 1)[0] - nbors = list(nx.non_neighbors(graph, node)) + nbors = nx.non_neighbors(graph, node) # should be all the other vertices in the graph if node != 0 and node != 99: assert len(nbors) == 97 @@ -317,13 +317,13 @@ def test_non_neighbors(self): # create a star graph with 99 outer nodes graph = nx.star_graph(99) - nbors = list(nx.non_neighbors(graph, 0)) + nbors = 
nx.non_neighbors(graph, 0) assert len(nbors) == 0 # disconnected graph graph = nx.Graph() graph.add_nodes_from(range(10)) - nbors = list(nx.non_neighbors(graph, 0)) + nbors = nx.non_neighbors(graph, 0) assert len(nbors) == 9 def test_non_edges(self): From d769e3b361f5287cc14df8e1d4f1703e2c261217 Mon Sep 17 00:00:00 2001 From: Ross Barnowski Date: Fri, 2 Feb 2024 01:14:08 -0800 Subject: [PATCH 059/129] Update docstring of nonisomorphic_trees. (#7255) --- networkx/generators/nonisomorphic_trees.py | 30 ++++++++++------------ 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/networkx/generators/nonisomorphic_trees.py b/networkx/generators/nonisomorphic_trees.py index 6f5d6412361..4389e810a22 100644 --- a/networkx/generators/nonisomorphic_trees.py +++ b/networkx/generators/nonisomorphic_trees.py @@ -19,22 +19,20 @@ def nonisomorphic_trees(order, create="graph"): Parameters ---------- order : int - order of the desired tree(s) - - create : graph or matrix (default="Graph) - If graph is selected a list of trees will be returned, - if matrix is selected a list of adjacency matrix will - be returned - - Returns - ------- - G : List of NetworkX Graphs - - M : List of Adjacency matrices - - References - ---------- - + order of the desired tree(s) + + create : one of {"graph", "matrix"} (default="graph") + If ``"graph"`` is selected a list of ``Graph`` instances will be returned, + if matrix is selected a list of adjacency matrices will be returned. 
+ + Yields + ------ + list + A list of nonisomorphic trees, in one of two formats depending on the + value of the `create` parameter: + - ``create="graph"``: yields a list of `networkx.Graph` instances + - ``create="matrix"``: yields a list of list-of-lists representing + adjacency matrices """ if order < 2: From ea6942ff6f8112d94cfa7fc5ac24eac2b82cce58 Mon Sep 17 00:00:00 2001 From: Aditi Juneja <91629733+Schefflera-Arboricola@users.noreply.github.com> Date: Sun, 4 Feb 2024 04:30:23 +0530 Subject: [PATCH 060/129] adding self loops related docs and tests for functions in `cluster.py` (#7261) * added self-loops notes * added test_self_loops_square_clustering * style fix * style fix --- networkx/algorithms/cluster.py | 6 ++++++ networkx/algorithms/tests/test_cluster.py | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 35c86daeb58..36e33f0226c 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -445,6 +445,10 @@ def transitivity(G): out : float Transitivity + Notes + ----- + Self loops are ignored. + Examples -------- >>> G = nx.complete_graph(5) @@ -581,6 +585,8 @@ def generalized_degree(G, nodes=None): Notes ----- + Self loops are ignored. + In a network of N nodes, the highest triangle multiplicity an edge can have is N-2. 
diff --git a/networkx/algorithms/tests/test_cluster.py b/networkx/algorithms/tests/test_cluster.py index d69f036ff6c..b656ba81553 100644 --- a/networkx/algorithms/tests/test_cluster.py +++ b/networkx/algorithms/tests/test_cluster.py @@ -457,6 +457,12 @@ def test_peng_square_clustering(self): G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (3, 6)]) assert nx.square_clustering(G, [1])[1] == 1 / 3 + def test_self_loops_square_clustering(self): + G = nx.path_graph(5) + assert nx.square_clustering(G) == {0: 0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0} + G.add_edges_from([(0, 0), (1, 1), (2, 2)]) + assert nx.square_clustering(G) == {0: 1, 1: 0.5, 2: 0.2, 3: 0.0, 4: 0} + class TestAverageClustering: @classmethod From 1fa2414f48aae6fb80ad8a2706c8a369b9a05d03 Mon Sep 17 00:00:00 2001 From: "d.grigonis" Date: Tue, 6 Feb 2024 20:06:44 +0200 Subject: [PATCH 061/129] Add feature for drawing multi edges and labels (#7010) Adds drawing of multiedges and edge labels for multigraphs. Multiedge styles are specified using connectionstyle and fully support a subset of simple matplotlib curve-patch styles (i.e. 3-point arc). Includes significant reorganization of FancyArrowPatch generation. Co-authored-by: Dan Schult --- examples/drawing/plot_multigraphs.py | 70 +++ networkx/drawing/nx_pylab.py | 728 ++++++++++++++++++--------- networkx/drawing/tests/test_pylab.py | 60 ++- 3 files changed, 607 insertions(+), 251 deletions(-) create mode 100644 examples/drawing/plot_multigraphs.py diff --git a/examples/drawing/plot_multigraphs.py b/examples/drawing/plot_multigraphs.py new file mode 100644 index 00000000000..31886de568a --- /dev/null +++ b/examples/drawing/plot_multigraphs.py @@ -0,0 +1,70 @@ +""" +====================================== +Plotting MultiDiGraph Edges and Labels +====================================== + +This example shows how to plot edges and labels for a MultiDiGraph class object. +The same applies for DiGraph and MultiGraph class objects. 
+ +4 Graphs are created, each with different number of edges between 2 nodes. +The final graph contains 4 edges in every node pair and 2 self loops per node. + +MultiGraph can have unlimited multi-edges that can be drawn +with different angles and theoretically node labels can remain visible. + +Multi-self-loops can be drawn in 4 directions of the node. +The subsequent loops will result in overlaps. +""" +import itertools as it +import numpy as np +import networkx as nx +import matplotlib.pyplot as plt + + +def draw_labeled_multigraph(G, attr_name, ax=None): + """ + Length of connectionstyle must be at least that of a maximum number of edges + between pair of nodes. This number is maximum one-sided connections + for directed graph and maximum total connections for undirected graph. + """ + # Works with arc3 and angle3 connectionstyles + connectionstyle = [f"arc3,rad={r}" for r in it.accumulate([0.15] * 4)] + # connectionstyle = [f"angle3,angleA={r}" for r in it.accumulate([30] * 4)] + + pos = nx.shell_layout(G) + nx.draw_networkx_nodes(G, pos, ax=ax) + nx.draw_networkx_labels(G, pos, font_size=20, ax=ax) + nx.draw_networkx_edges( + G, pos, edge_color="grey", connectionstyle=connectionstyle, ax=ax + ) + + labels = { + tuple(edge): f"{attr_name}={attrs[attr_name]}" + for *edge, attrs in G.edges(keys=True, data=True) + } + nx.draw_networkx_edge_labels( + G, + pos, + labels, + connectionstyle=connectionstyle, + label_pos=0.3, + font_color="blue", + bbox={"alpha": 0}, + ax=ax, + ) + + +nodes = "ABC" +prod = list(it.product(nodes, repeat=2)) +pair_dict = {f"Product x {i}": prod * i for i in range(1, 5)} + + +fig, axes = plt.subplots(2, 2) +for (name, pairs), ax in zip(pair_dict.items(), np.ravel(axes)): + G = nx.MultiDiGraph() + for i, (u, v) in enumerate(pairs): + G.add_edge(u, v, w=round(i / 3, 2)) + draw_labeled_multigraph(G, "w", ax) + ax.set_title(name) +fig.tight_layout() +plt.show() diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py index 
096e7b01d05..727111fb7ce 100644 --- a/networkx/drawing/nx_pylab.py +++ b/networkx/drawing/nx_pylab.py @@ -16,6 +16,8 @@ - :func:`matplotlib.pyplot.scatter` - :obj:`matplotlib.patches.FancyArrowPatch` """ +import collections +import itertools from numbers import Number import networkx as nx @@ -467,6 +469,197 @@ def draw_networkx_nodes( return node_collection +class FancyArrowFactory: + """Draw arrows with `matplotlib.patches.FancyarrowPatch`""" + + class ConnectionStyleFactory: + def __init__(self, connectionstyles, selfloop_height, ax=None): + import matplotlib as mpl + import matplotlib.path # call as mpl.path + import numpy as np + + self.ax = ax + self.mpl = mpl + self.np = np + self.base_connection_styles = [ + mpl.patches.ConnectionStyle(cs) for cs in connectionstyles + ] + self.n = len(self.base_connection_styles) + self.selfloop_height = selfloop_height + + def curved(self, edge_index): + return self.base_connection_styles[edge_index % self.n] + + def self_loop(self, edge_index): + def self_loop_connection(posA, posB, *args, **kwargs): + if not self.np.all(posA == posB): + raise nx.NetworkXError( + "`self_loop` connection style method" + "is only to be used for self-loops" + ) + # this is called with _screen space_ values + # so convert back to data space + data_loc = self.ax.transData.inverted().transform(posA) + v_shift = 0.1 * self.selfloop_height + h_shift = v_shift * 0.5 + # put the top of the loop first so arrow is not hidden by node + path = self.np.asarray( + [ + # 1 + [0, v_shift], + # 4 4 4 + [h_shift, v_shift], + [h_shift, 0], + [0, 0], + # 4 4 4 + [-h_shift, 0], + [-h_shift, v_shift], + [0, v_shift], + ] + ) + # Rotate self loop 90 deg. 
if more than 1 + # This will allow for maximum of 4 visible self loops + if edge_index % 4: + x, y = path.T + for _ in range(edge_index % 4): + x, y = y, -x + path = self.np.array([x, y]).T + return self.mpl.path.Path( + self.ax.transData.transform(data_loc + path), [1, 4, 4, 4, 4, 4, 4] + ) + + return self_loop_connection + + def __init__( + self, + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle="arc3", + node_shape="o", + arrowstyle="-", + arrowsize=10, + edge_color="k", + alpha=None, + linewidth=1.0, + style="solid", + min_source_margin=0, + min_target_margin=0, + ax=None, + ): + import matplotlib as mpl + import matplotlib.patches # call as mpl.patches + import matplotlib.pyplot as plt + import numpy as np + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + msg = "ConnectionStyleFactory arg `connectionstyle` must be str or iterable" + raise nx.NetworkXError(msg) + self.ax = ax + self.mpl = mpl + self.np = np + self.edge_pos = edge_pos + self.edgelist = edgelist + self.nodelist = nodelist + self.node_shape = node_shape + self.min_source_margin = min_source_margin + self.min_target_margin = min_target_margin + self.edge_indices = edge_indices + self.node_size = node_size + self.connectionstyle_factory = self.ConnectionStyleFactory( + connectionstyle, selfloop_height, ax + ) + self.arrowstyle = arrowstyle + self.arrowsize = arrowsize + self.arrow_colors = mpl.colors.colorConverter.to_rgba_array(edge_color, alpha) + self.linewidth = linewidth + self.style = style + if isinstance(arrowsize, list) and len(arrowsize) != len(edge_pos): + raise ValueError("arrowsize should have the same length as edgelist") + + def __call__(self, i): + (x1, y1), (x2, y2) = self.edge_pos[i] + shrink_source = 0 # space from source to tail + shrink_target = 0 # space from head to target + if 
self.np.iterable(self.node_size): # many node sizes + source, target = self.edgelist[i][:2] + source_node_size = self.node_size[self.nodelist.index(source)] + target_node_size = self.node_size[self.nodelist.index(target)] + shrink_source = self.to_marker_edge(source_node_size, self.node_shape) + shrink_target = self.to_marker_edge(target_node_size, self.node_shape) + else: + shrink_source = self.to_marker_edge(self.node_size, self.node_shape) + shrink_target = shrink_source + shrink_source = max(shrink_source, self.min_source_margin) + shrink_target = max(shrink_target, self.min_target_margin) + + # scale factor of arrow head + if isinstance(self.arrowsize, list): + mutation_scale = self.arrowsize[i] + else: + mutation_scale = self.arrowsize + + if len(self.arrow_colors) > i: + arrow_color = self.arrow_colors[i] + elif len(self.arrow_colors) == 1: + arrow_color = self.arrow_colors[0] + else: # Cycle through colors + arrow_color = self.arrow_colors[i % len(self.arrow_colors)] + + if self.np.iterable(self.linewidth): + if len(self.linewidth) > i: + linewidth = self.linewidth[i] + else: + linewidth = self.linewidth[i % len(self.linewidth)] + else: + linewidth = self.linewidth + + if ( + self.np.iterable(self.style) + and not isinstance(self.style, str) + and not isinstance(self.style, tuple) + ): + if len(self.style) > i: + linestyle = self.style[i] + else: # Cycle through styles + linestyle = self.style[i % len(self.style)] + else: + linestyle = self.style + + if x1 == x2 and y1 == y2: + connectionstyle = self.connectionstyle_factory.self_loop( + self.edge_indices[i] + ) + else: + connectionstyle = self.connectionstyle_factory.curved(self.edge_indices[i]) + return self.mpl.patches.FancyArrowPatch( + (x1, y1), + (x2, y2), + arrowstyle=self.arrowstyle, + shrinkA=shrink_source, + shrinkB=shrink_target, + mutation_scale=mutation_scale, + color=arrow_color, + linewidth=linewidth, + connectionstyle=connectionstyle, + linestyle=linestyle, + zorder=1, # arrows go behind 
nodes + ) + + def to_marker_edge(self, marker_size, marker): + if marker in "s^>vv