koichi12 commited on
Commit
1cd5f2c
·
verified ·
1 Parent(s): e72c1f5

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. tuning-competition-baseline/.venv/bin/pip3 +8 -0
  2. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/METADATA +105 -0
  3. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/WHEEL +5 -0
  4. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc +0 -0
  5. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc +0 -0
  6. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc +0 -0
  7. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc +0 -0
  8. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc +0 -0
  9. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc +0 -0
  10. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc +0 -0
  11. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc +0 -0
  12. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc +0 -0
  13. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc +0 -0
  14. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc +0 -0
  15. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc +0 -0
  16. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc +0 -0
  17. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc +0 -0
  18. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc +0 -0
  19. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc +0 -0
  20. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc +0 -0
  21. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc +0 -0
  22. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc +0 -0
  23. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc +0 -0
  24. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc +0 -0
  25. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc +0 -0
  26. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/threshold.cpython-311.pyc +0 -0
  27. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc +0 -0
  28. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc +0 -0
  29. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/clique.py +258 -0
  30. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py +303 -0
  31. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_matching.py +8 -0
  32. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py +31 -0
  33. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py +280 -0
  34. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc +0 -0
  35. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/modularity_max.py +448 -0
  36. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-311.pyc +0 -0
  37. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-311.pyc +0 -0
  38. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-311.pyc +0 -0
  39. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc +0 -0
  40. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc +0 -0
  41. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc +0 -0
  42. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-311.pyc +0 -0
  43. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py +90 -0
  44. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/weakly_connected.py +196 -0
  45. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc +0 -0
  46. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc +0 -0
  47. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc +0 -0
  48. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc +0 -0
  49. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/ismags.py +1169 -0
  50. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorph.py +248 -0
tuning-competition-baseline/.venv/bin/pip3 ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/METADATA ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: Jinja2
3
+ Version: 3.1.3
4
+ Summary: A very fast and expressive template engine.
5
+ Home-page: https://palletsprojects.com/p/jinja/
6
+ Maintainer: Pallets
7
+ Maintainer-email: contact@palletsprojects.com
8
+ License: BSD-3-Clause
9
+ Project-URL: Donate, https://palletsprojects.com/donate
10
+ Project-URL: Documentation, https://jinja.palletsprojects.com/
11
+ Project-URL: Changes, https://jinja.palletsprojects.com/changes/
12
+ Project-URL: Source Code, https://github.com/pallets/jinja/
13
+ Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
14
+ Project-URL: Chat, https://discord.gg/pallets
15
+ Classifier: Development Status :: 5 - Production/Stable
16
+ Classifier: Environment :: Web Environment
17
+ Classifier: Intended Audience :: Developers
18
+ Classifier: License :: OSI Approved :: BSD License
19
+ Classifier: Operating System :: OS Independent
20
+ Classifier: Programming Language :: Python
21
+ Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
22
+ Classifier: Topic :: Text Processing :: Markup :: HTML
23
+ Requires-Python: >=3.7
24
+ Description-Content-Type: text/x-rst
25
+ License-File: LICENSE.rst
26
+ Requires-Dist: MarkupSafe >=2.0
27
+ Provides-Extra: i18n
28
+ Requires-Dist: Babel >=2.7 ; extra == 'i18n'
29
+
30
+ Jinja
31
+ =====
32
+
33
+ Jinja is a fast, expressive, extensible templating engine. Special
34
+ placeholders in the template allow writing code similar to Python
35
+ syntax. Then the template is passed data to render the final document.
36
+
37
+ It includes:
38
+
39
+ - Template inheritance and inclusion.
40
+ - Define and import macros within templates.
41
+ - HTML templates can use autoescaping to prevent XSS from untrusted
42
+ user input.
43
+ - A sandboxed environment can safely render untrusted templates.
44
+ - AsyncIO support for generating templates and calling async
45
+ functions.
46
+ - I18N support with Babel.
47
+ - Templates are compiled to optimized Python code just-in-time and
48
+ cached, or can be compiled ahead-of-time.
49
+ - Exceptions point to the correct line in templates to make debugging
50
+ easier.
51
+ - Extensible filters, tests, functions, and even syntax.
52
+
53
+ Jinja's philosophy is that while application logic belongs in Python if
54
+ possible, it shouldn't make the template designer's job difficult by
55
+ restricting functionality too much.
56
+
57
+
58
+ Installing
59
+ ----------
60
+
61
+ Install and update using `pip`_:
62
+
63
+ .. code-block:: text
64
+
65
+ $ pip install -U Jinja2
66
+
67
+ .. _pip: https://pip.pypa.io/en/stable/getting-started/
68
+
69
+
70
+ In A Nutshell
71
+ -------------
72
+
73
+ .. code-block:: jinja
74
+
75
+ {% extends "base.html" %}
76
+ {% block title %}Members{% endblock %}
77
+ {% block content %}
78
+ <ul>
79
+ {% for user in users %}
80
+ <li><a href="{{ user.url }}">{{ user.username }}</a></li>
81
+ {% endfor %}
82
+ </ul>
83
+ {% endblock %}
84
+
85
+
86
+ Donate
87
+ ------
88
+
89
+ The Pallets organization develops and supports Jinja and other popular
90
+ packages. In order to grow the community of contributors and users, and
91
+ allow the maintainers to devote more time to the projects, `please
92
+ donate today`_.
93
+
94
+ .. _please donate today: https://palletsprojects.com/donate
95
+
96
+
97
+ Links
98
+ -----
99
+
100
+ - Documentation: https://jinja.palletsprojects.com/
101
+ - Changes: https://jinja.palletsprojects.com/changes/
102
+ - PyPI Releases: https://pypi.org/project/Jinja2/
103
+ - Source Code: https://github.com/pallets/jinja/
104
+ - Issue Tracker: https://github.com/pallets/jinja/issues/
105
+ - Chat: https://discord.gg/pallets
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.42.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (7.12 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc ADDED
Binary file (6.49 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc ADDED
Binary file (7.97 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc ADDED
Binary file (17.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc ADDED
Binary file (35.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc ADDED
Binary file (12.5 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc ADDED
Binary file (55.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc ADDED
Binary file (49.3 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc ADDED
Binary file (5.36 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc ADDED
Binary file (3.64 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc ADDED
Binary file (5.82 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc ADDED
Binary file (18.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc ADDED
Binary file (14.4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc ADDED
Binary file (17.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc ADDED
Binary file (3.43 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc ADDED
Binary file (26.4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc ADDED
Binary file (3.7 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc ADDED
Binary file (16.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc ADDED
Binary file (12.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc ADDED
Binary file (73.5 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc ADDED
Binary file (12.7 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc ADDED
Binary file (27.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/threshold.cpython-311.pyc ADDED
Binary file (39.4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc ADDED
Binary file (20.3 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc ADDED
Binary file (1.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/clique.py ADDED
@@ -0,0 +1,258 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing large cliques and maximum independent sets."""
2
+ import networkx as nx
3
+ from networkx.algorithms.approximation import ramsey
4
+ from networkx.utils import not_implemented_for
5
+
6
+ __all__ = [
7
+ "clique_removal",
8
+ "max_clique",
9
+ "large_clique_size",
10
+ "maximum_independent_set",
11
+ ]
12
+
13
+
14
+ @not_implemented_for("directed")
15
+ @not_implemented_for("multigraph")
16
+ @nx._dispatch
17
+ def maximum_independent_set(G):
18
+ """Returns an approximate maximum independent set.
19
+
20
+ Independent set or stable set is a set of vertices in a graph, no two of
21
+ which are adjacent. That is, it is a set I of vertices such that for every
22
+ two vertices in I, there is no edge connecting the two. Equivalently, each
23
+ edge in the graph has at most one endpoint in I. The size of an independent
24
+ set is the number of vertices it contains [1]_.
25
+
26
+ A maximum independent set is a largest independent set for a given graph G
27
+ and its size is denoted $\\alpha(G)$. The problem of finding such a set is called
28
+ the maximum independent set problem and is an NP-hard optimization problem.
29
+ As such, it is unlikely that there exists an efficient algorithm for finding
30
+ a maximum independent set of a graph.
31
+
32
+ The Independent Set algorithm is based on [2]_.
33
+
34
+ Parameters
35
+ ----------
36
+ G : NetworkX graph
37
+ Undirected graph
38
+
39
+ Returns
40
+ -------
41
+ iset : Set
42
+ The apx-maximum independent set
43
+
44
+ Examples
45
+ --------
46
+ >>> G = nx.path_graph(10)
47
+ >>> nx.approximation.maximum_independent_set(G)
48
+ {0, 2, 4, 6, 9}
49
+
50
+ Raises
51
+ ------
52
+ NetworkXNotImplemented
53
+ If the graph is directed or is a multigraph.
54
+
55
+ Notes
56
+ -----
57
+ Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case.
58
+
59
+ References
60
+ ----------
61
+ .. [1] `Wikipedia: Independent set
62
+ <https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
63
+ .. [2] Boppana, R., & Halldórsson, M. M. (1992).
64
+ Approximating maximum independent sets by excluding subgraphs.
65
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
66
+ """
67
+ iset, _ = clique_removal(G)
68
+ return iset
69
+
70
+
71
+ @not_implemented_for("directed")
72
+ @not_implemented_for("multigraph")
73
+ @nx._dispatch
74
+ def max_clique(G):
75
+ r"""Find the Maximum Clique
76
+
77
+ Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set
78
+ in the worst case.
79
+
80
+ Parameters
81
+ ----------
82
+ G : NetworkX graph
83
+ Undirected graph
84
+
85
+ Returns
86
+ -------
87
+ clique : set
88
+ The apx-maximum clique of the graph
89
+
90
+ Examples
91
+ --------
92
+ >>> G = nx.path_graph(10)
93
+ >>> nx.approximation.max_clique(G)
94
+ {8, 9}
95
+
96
+ Raises
97
+ ------
98
+ NetworkXNotImplemented
99
+ If the graph is directed or is a multigraph.
100
+
101
+ Notes
102
+ -----
103
+ A clique in an undirected graph G = (V, E) is a subset of the vertex set
104
+ `C \subseteq V` such that for every two vertices in C there exists an edge
105
+ connecting the two. This is equivalent to saying that the subgraph
106
+ induced by C is complete (in some cases, the term clique may also refer
107
+ to the subgraph).
108
+
109
+ A maximum clique is a clique of the largest possible size in a given graph.
110
+ The clique number `\omega(G)` of a graph G is the number of
111
+ vertices in a maximum clique in G. The intersection number of
112
+ G is the smallest number of cliques that together cover all edges of G.
113
+
114
+ https://en.wikipedia.org/wiki/Maximum_clique
115
+
116
+ References
117
+ ----------
118
+ .. [1] Boppana, R., & Halldórsson, M. M. (1992).
119
+ Approximating maximum independent sets by excluding subgraphs.
120
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
121
+ doi:10.1007/BF01994876
122
+ """
123
+ # finding the maximum clique in a graph is equivalent to finding
124
+ # the independent set in the complementary graph
125
+ cgraph = nx.complement(G)
126
+ iset, _ = clique_removal(cgraph)
127
+ return iset
128
+
129
+
130
+ @not_implemented_for("directed")
131
+ @not_implemented_for("multigraph")
132
+ @nx._dispatch
133
+ def clique_removal(G):
134
+ r"""Repeatedly remove cliques from the graph.
135
+
136
+ Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique
137
+ and independent set. Returns the largest independent set found, along
138
+ with found maximal cliques.
139
+
140
+ Parameters
141
+ ----------
142
+ G : NetworkX graph
143
+ Undirected graph
144
+
145
+ Returns
146
+ -------
147
+ max_ind_cliques : (set, list) tuple
148
+ 2-tuple of Maximal Independent Set and list of maximal cliques (sets).
149
+
150
+ Examples
151
+ --------
152
+ >>> G = nx.path_graph(10)
153
+ >>> nx.approximation.clique_removal(G)
154
+ ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}])
155
+
156
+ Raises
157
+ ------
158
+ NetworkXNotImplemented
159
+ If the graph is directed or is a multigraph.
160
+
161
+ References
162
+ ----------
163
+ .. [1] Boppana, R., & Halldórsson, M. M. (1992).
164
+ Approximating maximum independent sets by excluding subgraphs.
165
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
166
+ """
167
+ graph = G.copy()
168
+ c_i, i_i = ramsey.ramsey_R2(graph)
169
+ cliques = [c_i]
170
+ isets = [i_i]
171
+ while graph:
172
+ graph.remove_nodes_from(c_i)
173
+ c_i, i_i = ramsey.ramsey_R2(graph)
174
+ if c_i:
175
+ cliques.append(c_i)
176
+ if i_i:
177
+ isets.append(i_i)
178
+ # Determine the largest independent set as measured by cardinality.
179
+ maxiset = max(isets, key=len)
180
+ return maxiset, cliques
181
+
182
+
183
+ @not_implemented_for("directed")
184
+ @not_implemented_for("multigraph")
185
+ @nx._dispatch
186
+ def large_clique_size(G):
187
+ """Find the size of a large clique in a graph.
188
+
189
+ A *clique* is a subset of nodes in which each pair of nodes is
190
+ adjacent. This function is a heuristic for finding the size of a
191
+ large clique in the graph.
192
+
193
+ Parameters
194
+ ----------
195
+ G : NetworkX graph
196
+
197
+ Returns
198
+ -------
199
+ k: integer
200
+ The size of a large clique in the graph.
201
+
202
+ Examples
203
+ --------
204
+ >>> G = nx.path_graph(10)
205
+ >>> nx.approximation.large_clique_size(G)
206
+ 2
207
+
208
+ Raises
209
+ ------
210
+ NetworkXNotImplemented
211
+ If the graph is directed or is a multigraph.
212
+
213
+ Notes
214
+ -----
215
+ This implementation is from [1]_. Its worst case time complexity is
216
+ :math:`O(n d^2)`, where *n* is the number of nodes in the graph and
217
+ *d* is the maximum degree.
218
+
219
+ This function is a heuristic, which means it may work well in
220
+ practice, but there is no rigorous mathematical guarantee on the
221
+ ratio between the returned number and the actual largest clique size
222
+ in the graph.
223
+
224
+ References
225
+ ----------
226
+ .. [1] Pattabiraman, Bharath, et al.
227
+ "Fast Algorithms for the Maximum Clique Problem on Massive Graphs
228
+ with Applications to Overlapping Community Detection."
229
+ *Internet Mathematics* 11.4-5 (2015): 421--448.
230
+ <https://doi.org/10.1080/15427951.2014.986778>
231
+
232
+ See also
233
+ --------
234
+
235
+ :func:`networkx.algorithms.approximation.clique.max_clique`
236
+ A function that returns an approximate maximum clique with a
237
+ guarantee on the approximation ratio.
238
+
239
+ :mod:`networkx.algorithms.clique`
240
+ Functions for finding the exact maximum clique in a graph.
241
+
242
+ """
243
+ degrees = G.degree
244
+
245
+ def _clique_heuristic(G, U, size, best_size):
246
+ if not U:
247
+ return max(best_size, size)
248
+ u = max(U, key=degrees)
249
+ U.remove(u)
250
+ N_prime = {v for v in G[u] if degrees[v] >= best_size}
251
+ return _clique_heuristic(G, U & N_prime, size + 1, best_size)
252
+
253
+ best_size = 0
254
+ nodes = (u for u in G if degrees[u] >= best_size)
255
+ for u in nodes:
256
+ neighbors = {v for v in G[u] if degrees[v] >= best_size}
257
+ best_size = _clique_heuristic(G, neighbors, 1, best_size)
258
+ return best_size
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py ADDED
@@ -0,0 +1,303 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Test for approximation to k-components algorithm
2
+ import pytest
3
+
4
+ import networkx as nx
5
+ from networkx.algorithms.approximation import k_components
6
+ from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same
7
+
8
+
9
+ def build_k_number_dict(k_components):
10
+ k_num = {}
11
+ for k, comps in sorted(k_components.items()):
12
+ for comp in comps:
13
+ for node in comp:
14
+ k_num[node] = k
15
+ return k_num
16
+
17
+
18
+ ##
19
+ # Some nice synthetic graphs
20
+ ##
21
+
22
+
23
+ def graph_example_1():
24
+ G = nx.convert_node_labels_to_integers(
25
+ nx.grid_graph([5, 5]), label_attribute="labels"
26
+ )
27
+ rlabels = nx.get_node_attributes(G, "labels")
28
+ labels = {v: k for k, v in rlabels.items()}
29
+
30
+ for nodes in [
31
+ (labels[(0, 0)], labels[(1, 0)]),
32
+ (labels[(0, 4)], labels[(1, 4)]),
33
+ (labels[(3, 0)], labels[(4, 0)]),
34
+ (labels[(3, 4)], labels[(4, 4)]),
35
+ ]:
36
+ new_node = G.order() + 1
37
+ # Petersen graph is triconnected
38
+ P = nx.petersen_graph()
39
+ G = nx.disjoint_union(G, P)
40
+ # Add two edges between the grid and P
41
+ G.add_edge(new_node + 1, nodes[0])
42
+ G.add_edge(new_node, nodes[1])
43
+ # K5 is 4-connected
44
+ K = nx.complete_graph(5)
45
+ G = nx.disjoint_union(G, K)
46
+ # Add three edges between P and K5
47
+ G.add_edge(new_node + 2, new_node + 11)
48
+ G.add_edge(new_node + 3, new_node + 12)
49
+ G.add_edge(new_node + 4, new_node + 13)
50
+ # Add another K5 sharing a node
51
+ G = nx.disjoint_union(G, K)
52
+ nbrs = G[new_node + 10]
53
+ G.remove_node(new_node + 10)
54
+ for nbr in nbrs:
55
+ G.add_edge(new_node + 17, nbr)
56
+ G.add_edge(new_node + 16, new_node + 5)
57
+ return G
58
+
59
+
60
+ def torrents_and_ferraro_graph():
61
+ G = nx.convert_node_labels_to_integers(
62
+ nx.grid_graph([5, 5]), label_attribute="labels"
63
+ )
64
+ rlabels = nx.get_node_attributes(G, "labels")
65
+ labels = {v: k for k, v in rlabels.items()}
66
+
67
+ for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
68
+ new_node = G.order() + 1
69
+ # Petersen graph is triconnected
70
+ P = nx.petersen_graph()
71
+ G = nx.disjoint_union(G, P)
72
+ # Add two edges between the grid and P
73
+ G.add_edge(new_node + 1, nodes[0])
74
+ G.add_edge(new_node, nodes[1])
75
+ # K5 is 4-connected
76
+ K = nx.complete_graph(5)
77
+ G = nx.disjoint_union(G, K)
78
+ # Add three edges between P and K5
79
+ G.add_edge(new_node + 2, new_node + 11)
80
+ G.add_edge(new_node + 3, new_node + 12)
81
+ G.add_edge(new_node + 4, new_node + 13)
82
+ # Add another K5 sharing a node
83
+ G = nx.disjoint_union(G, K)
84
+ nbrs = G[new_node + 10]
85
+ G.remove_node(new_node + 10)
86
+ for nbr in nbrs:
87
+ G.add_edge(new_node + 17, nbr)
88
+ # Commenting this makes the graph not biconnected !!
89
+ # This stupid mistake make one reviewer very angry :P
90
+ G.add_edge(new_node + 16, new_node + 8)
91
+
92
+ for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
93
+ new_node = G.order() + 1
94
+ # Petersen graph is triconnected
95
+ P = nx.petersen_graph()
96
+ G = nx.disjoint_union(G, P)
97
+ # Add two edges between the grid and P
98
+ G.add_edge(new_node + 1, nodes[0])
99
+ G.add_edge(new_node, nodes[1])
100
+ # K5 is 4-connected
101
+ K = nx.complete_graph(5)
102
+ G = nx.disjoint_union(G, K)
103
+ # Add three edges between P and K5
104
+ G.add_edge(new_node + 2, new_node + 11)
105
+ G.add_edge(new_node + 3, new_node + 12)
106
+ G.add_edge(new_node + 4, new_node + 13)
107
+ # Add another K5 sharing two nodes
108
+ G = nx.disjoint_union(G, K)
109
+ nbrs = G[new_node + 10]
110
+ G.remove_node(new_node + 10)
111
+ for nbr in nbrs:
112
+ G.add_edge(new_node + 17, nbr)
113
+ nbrs2 = G[new_node + 9]
114
+ G.remove_node(new_node + 9)
115
+ for nbr in nbrs2:
116
+ G.add_edge(new_node + 18, nbr)
117
+ return G
118
+
119
+
120
+ # Helper function
121
+
122
+
123
+ def _check_connectivity(G):
124
+ result = k_components(G)
125
+ for k, components in result.items():
126
+ if k < 3:
127
+ continue
128
+ for component in components:
129
+ C = G.subgraph(component)
130
+ K = nx.node_connectivity(C)
131
+ assert K >= k
132
+
133
+
134
+ def test_torrents_and_ferraro_graph():
135
+ G = torrents_and_ferraro_graph()
136
+ _check_connectivity(G)
137
+
138
+
139
+ def test_example_1():
140
+ G = graph_example_1()
141
+ _check_connectivity(G)
142
+
143
+
144
+ def test_karate_0():
145
+ G = nx.karate_club_graph()
146
+ _check_connectivity(G)
147
+
148
+
149
+ def test_karate_1():
150
+ karate_k_num = {
151
+ 0: 4,
152
+ 1: 4,
153
+ 2: 4,
154
+ 3: 4,
155
+ 4: 3,
156
+ 5: 3,
157
+ 6: 3,
158
+ 7: 4,
159
+ 8: 4,
160
+ 9: 2,
161
+ 10: 3,
162
+ 11: 1,
163
+ 12: 2,
164
+ 13: 4,
165
+ 14: 2,
166
+ 15: 2,
167
+ 16: 2,
168
+ 17: 2,
169
+ 18: 2,
170
+ 19: 3,
171
+ 20: 2,
172
+ 21: 2,
173
+ 22: 2,
174
+ 23: 3,
175
+ 24: 3,
176
+ 25: 3,
177
+ 26: 2,
178
+ 27: 3,
179
+ 28: 3,
180
+ 29: 3,
181
+ 30: 4,
182
+ 31: 3,
183
+ 32: 4,
184
+ 33: 4,
185
+ }
186
+ approx_karate_k_num = karate_k_num.copy()
187
+ approx_karate_k_num[24] = 2
188
+ approx_karate_k_num[25] = 2
189
+ G = nx.karate_club_graph()
190
+ k_comps = k_components(G)
191
+ k_num = build_k_number_dict(k_comps)
192
+ assert k_num in (karate_k_num, approx_karate_k_num)
193
+
194
+
195
+ def test_example_1_detail_3_and_4():
196
+ G = graph_example_1()
197
+ result = k_components(G)
198
+ # In this example graph there are 8 3-components, 4 with 15 nodes
199
+ # and 4 with 5 nodes.
200
+ assert len(result[3]) == 8
201
+ assert len([c for c in result[3] if len(c) == 15]) == 4
202
+ assert len([c for c in result[3] if len(c) == 5]) == 4
203
+ # There are also 8 4-components all with 5 nodes.
204
+ assert len(result[4]) == 8
205
+ assert all(len(c) == 5 for c in result[4])
206
+ # Finally check that the k-components detected have actually node
207
+ # connectivity >= k.
208
+ for k, components in result.items():
209
+ if k < 3:
210
+ continue
211
+ for component in components:
212
+ K = nx.node_connectivity(G.subgraph(component))
213
+ assert K >= k
214
+
215
+
216
+ def test_directed():
217
+ with pytest.raises(nx.NetworkXNotImplemented):
218
+ G = nx.gnp_random_graph(10, 0.4, directed=True)
219
+ kc = k_components(G)
220
+
221
+
222
+ def test_same():
223
+ equal = {"A": 2, "B": 2, "C": 2}
224
+ slightly_different = {"A": 2, "B": 1, "C": 2}
225
+ different = {"A": 2, "B": 8, "C": 18}
226
+ assert _same(equal)
227
+ assert not _same(slightly_different)
228
+ assert _same(slightly_different, tol=1)
229
+ assert not _same(different)
230
+ assert not _same(different, tol=4)
231
+
232
+
233
+ class TestAntiGraph:
234
+ @classmethod
235
+ def setup_class(cls):
236
+ cls.Gnp = nx.gnp_random_graph(20, 0.8, seed=42)
237
+ cls.Anp = _AntiGraph(nx.complement(cls.Gnp))
238
+ cls.Gd = nx.davis_southern_women_graph()
239
+ cls.Ad = _AntiGraph(nx.complement(cls.Gd))
240
+ cls.Gk = nx.karate_club_graph()
241
+ cls.Ak = _AntiGraph(nx.complement(cls.Gk))
242
+ cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)]
243
+
244
+ def test_size(self):
245
+ for G, A in self.GA:
246
+ n = G.order()
247
+ s = len(list(G.edges())) + len(list(A.edges()))
248
+ assert s == (n * (n - 1)) / 2
249
+
250
+ def test_degree(self):
251
+ for G, A in self.GA:
252
+ assert sorted(G.degree()) == sorted(A.degree())
253
+
254
+ def test_core_number(self):
255
+ for G, A in self.GA:
256
+ assert nx.core_number(G) == nx.core_number(A)
257
+
258
+ def test_connected_components(self):
259
+ # ccs are same unless isolated nodes or any node has degree=len(G)-1
260
+ # graphs in self.GA avoid this problem
261
+ for G, A in self.GA:
262
+ gc = [set(c) for c in nx.connected_components(G)]
263
+ ac = [set(c) for c in nx.connected_components(A)]
264
+ for comp in ac:
265
+ assert comp in gc
266
+
267
+ def test_adj(self):
268
+ for G, A in self.GA:
269
+ for n, nbrs in G.adj.items():
270
+ a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items())
271
+ g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items())
272
+ assert a_adj == g_adj
273
+
274
+ def test_adjacency(self):
275
+ for G, A in self.GA:
276
+ a_adj = list(A.adjacency())
277
+ for n, nbrs in G.adjacency():
278
+ assert (n, set(nbrs)) in a_adj
279
+
280
+ def test_neighbors(self):
281
+ for G, A in self.GA:
282
+ node = list(G.nodes())[0]
283
+ assert set(G.neighbors(node)) == set(A.neighbors(node))
284
+
285
+ def test_node_not_in_graph(self):
286
+ for G, A in self.GA:
287
+ node = "non_existent_node"
288
+ pytest.raises(nx.NetworkXError, A.neighbors, node)
289
+ pytest.raises(nx.NetworkXError, G.neighbors, node)
290
+
291
+ def test_degree_thingraph(self):
292
+ for G, A in self.GA:
293
+ node = list(G.nodes())[0]
294
+ nodes = list(G.nodes())[1:4]
295
+ assert G.degree(node) == A.degree(node)
296
+ assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree())
297
+ # AntiGraph is a ThinGraph, so all the weights are 1
298
+ assert sum(d for n, d in A.degree()) == sum(
299
+ d for n, d in A.degree(weight="weight")
300
+ )
301
+ assert sum(d for n, d in G.degree(nodes)) == sum(
302
+ d for n, d in A.degree(nodes)
303
+ )
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_matching.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ import networkx.algorithms.approximation as a
3
+
4
+
5
def test_min_maximal_matching():
    """Smoke test: the empty graph has an empty minimum maximal matching."""
    empty = nx.Graph()
    matching = a.min_maximal_matching(empty)
    assert len(matching) == 0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ import networkx.algorithms.approximation as apxa
3
+
4
+
5
def test_ramsey():
    """Exercise ramsey_R2 on a clique, the trivial graph, and a barbell graph."""
    # this should only find the complete graph
    graph = nx.complete_graph(10)
    clique, indep = apxa.ramsey_R2(graph)
    clique_density = nx.density(graph.subgraph(clique))
    assert clique_density == 1.0, "clique not correctly found by ramsey!"
    indep_density = nx.density(graph.subgraph(indep))
    assert indep_density == 0.0, "i-set not correctly found by ramsey!"

    # this trivial graph has no cliques. should just find i-sets
    graph = nx.trivial_graph()
    clique, indep = apxa.ramsey_R2(graph)
    assert clique == {0}, "clique not correctly found by ramsey!"
    assert indep == {0}, "i-set not correctly found by ramsey!"

    graph = nx.barbell_graph(10, 5, nx.Graph())
    clique, indep = apxa.ramsey_R2(graph)
    clique_density = nx.density(graph.subgraph(clique))
    assert clique_density == 1.0, "clique not correctly found by ramsey!"
    indep_density = nx.density(graph.subgraph(indep))
    assert indep_density == 0.0, "i-set not correctly found by ramsey!"

    # add self-loops and test again
    graph.add_edges_from((n, n) for n in range(0, len(graph), 2))
    clique_again, indep_again = apxa.ramsey_R2(graph)
    assert clique_again == clique
    assert indep_again == indep
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.approximation import (
5
+ treewidth_min_degree,
6
+ treewidth_min_fill_in,
7
+ )
8
+ from networkx.algorithms.approximation.treewidth import (
9
+ MinDegreeHeuristic,
10
+ min_fill_in_heuristic,
11
+ )
12
+
13
+
14
def is_tree_decomp(graph, decomp):
    """Check if the given tree decomposition is valid."""
    # Every node of the graph must appear in at least one bag.
    for x in graph.nodes():
        assert any(x in bag for bag in decomp.nodes())

    # Each connected pair of nodes must occur together in some bag.
    for x, y in graph.edges():
        assert any(x in bag and y in bag for bag in decomp.nodes())

    # The bags containing a vertex v must form a connected subtree of T.
    for v in graph.nodes():
        bags_with_v = [bag for bag in decomp.nodes() if v in bag]
        assert nx.is_connected(decomp.subgraph(bags_with_v))
41
+
42
+
43
class TestTreewidthMinDegree:
    """Unit tests for the min_degree function"""

    @classmethod
    def setup_class(cls):
        """Setup for different kinds of trees"""
        # A triangle: fully connected 3-node graph.
        cls.complete = nx.Graph()
        cls.complete.add_edge(1, 2)
        cls.complete.add_edge(2, 3)
        cls.complete.add_edge(1, 3)

        # A small graph with a tree part (1,2,4 hanging off 3) plus a
        # triangle (5,6,7); its known treewidth is 2.
        cls.small_tree = nx.Graph()
        cls.small_tree.add_edge(1, 3)
        cls.small_tree.add_edge(4, 3)
        cls.small_tree.add_edge(2, 3)
        cls.small_tree.add_edge(3, 5)
        cls.small_tree.add_edge(5, 6)
        cls.small_tree.add_edge(5, 7)
        cls.small_tree.add_edge(6, 7)

        # Graph whose node degrees are strictly ordered, so the min-degree
        # heuristic's elimination order is deterministic.
        cls.deterministic_graph = nx.Graph()
        cls.deterministic_graph.add_edge(0, 1)  # deg(0) = 1

        cls.deterministic_graph.add_edge(1, 2)  # deg(1) = 2

        cls.deterministic_graph.add_edge(2, 3)
        cls.deterministic_graph.add_edge(2, 4)  # deg(2) = 3

        cls.deterministic_graph.add_edge(3, 4)
        cls.deterministic_graph.add_edge(3, 5)
        cls.deterministic_graph.add_edge(3, 6)  # deg(3) = 4

        cls.deterministic_graph.add_edge(4, 5)
        cls.deterministic_graph.add_edge(4, 6)
        cls.deterministic_graph.add_edge(4, 7)  # deg(4) = 5

        cls.deterministic_graph.add_edge(5, 6)
        cls.deterministic_graph.add_edge(5, 7)
        cls.deterministic_graph.add_edge(5, 8)
        cls.deterministic_graph.add_edge(5, 9)  # deg(5) = 6

        cls.deterministic_graph.add_edge(6, 7)
        cls.deterministic_graph.add_edge(6, 8)
        cls.deterministic_graph.add_edge(6, 9)  # deg(6) = 6

        cls.deterministic_graph.add_edge(7, 8)
        cls.deterministic_graph.add_edge(7, 9)  # deg(7) = 5

        cls.deterministic_graph.add_edge(8, 9)  # deg(8) = 4

    def test_petersen_graph(self):
        """Test Petersen graph tree decomposition result"""
        G = nx.petersen_graph()
        _, decomp = treewidth_min_degree(G)
        is_tree_decomp(G, decomp)

    def test_small_tree_treewidth(self):
        """Test small tree

        Test if the computed treewidth of the known self.small_tree is 2.
        As we know which value we can expect from our heuristic, values other
        than two are regressions
        """
        G = self.small_tree
        # the order of removal should be [1,2,4]3[5,6,7]
        # (with [] denoting any order of the containing nodes)
        # resulting in treewidth 2 for the heuristic
        # Fixed: this class tests the min-degree heuristic, but the original
        # called treewidth_min_fill_in here (copy-paste from the other class).
        treewidth, _ = treewidth_min_degree(G)
        assert treewidth == 2

    def test_heuristic_abort(self):
        """Test heuristic abort condition for fully connected graph"""
        graph = {}
        for u in self.complete:
            graph[u] = set()
            for v in self.complete[u]:
                if u != v:  # ignore self-loop
                    graph[u].add(v)

        deg_heuristic = MinDegreeHeuristic(graph)
        node = deg_heuristic.best_node(graph)
        # A fully connected graph has no node worth eliminating; the
        # heuristic signals this by returning None.
        assert node is None

    def test_empty_graph(self):
        """Test empty graph"""
        G = nx.Graph()
        _, _ = treewidth_min_degree(G)

    def test_two_component_graph(self):
        """Two isolated nodes must yield treewidth 0."""
        G = nx.Graph()
        G.add_node(1)
        G.add_node(2)
        treewidth, _ = treewidth_min_degree(G)
        assert treewidth == 0

    def test_not_sortable_nodes(self):
        """Smoke test: mixed (unorderable) node types must not raise."""
        G = nx.Graph([(0, "a")])
        treewidth_min_degree(G)

    def test_heuristic_first_steps(self):
        """Test first steps of min_degree heuristic"""
        graph = {
            n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
        }
        deg_heuristic = MinDegreeHeuristic(graph)
        elim_node = deg_heuristic.best_node(graph)
        print(f"Graph {graph}:")
        steps = []

        while elim_node is not None:
            print(f"Removing {elim_node}:")
            steps.append(elim_node)
            nbrs = graph[elim_node]

            # Eliminate the node: turn its neighborhood into a clique ...
            for u, v in itertools.permutations(nbrs, 2):
                if v not in graph[u]:
                    graph[u].add(v)

            # ... then remove the node from every adjacency set.
            for u in graph:
                if elim_node in graph[u]:
                    graph[u].remove(elim_node)

            del graph[elim_node]
            print(f"Graph {graph}:")
            elim_node = deg_heuristic.best_node(graph)

        # check only the first 5 elements for equality
        assert steps[:5] == [0, 1, 2, 3, 4]
174
+
175
+
176
class TestTreewidthMinFillIn:
    """Unit tests for the treewidth_min_fill_in function."""

    @classmethod
    def setup_class(cls):
        """Setup for different kinds of trees"""
        # A triangle: fully connected 3-node graph.
        cls.complete = nx.Graph()
        cls.complete.add_edge(1, 2)
        cls.complete.add_edge(2, 3)
        cls.complete.add_edge(1, 3)

        # A small graph of known treewidth 2.
        cls.small_tree = nx.Graph()
        cls.small_tree.add_edge(1, 2)
        cls.small_tree.add_edge(2, 3)
        cls.small_tree.add_edge(3, 4)
        cls.small_tree.add_edge(1, 4)
        cls.small_tree.add_edge(2, 4)
        cls.small_tree.add_edge(4, 5)
        cls.small_tree.add_edge(5, 6)
        cls.small_tree.add_edge(5, 7)
        cls.small_tree.add_edge(6, 7)

        # Graph for which the min-fill-in elimination order is deterministic.
        cls.deterministic_graph = nx.Graph()
        cls.deterministic_graph.add_edge(1, 2)
        cls.deterministic_graph.add_edge(1, 3)
        cls.deterministic_graph.add_edge(3, 4)
        cls.deterministic_graph.add_edge(2, 4)
        cls.deterministic_graph.add_edge(3, 5)
        cls.deterministic_graph.add_edge(4, 5)
        cls.deterministic_graph.add_edge(3, 6)
        cls.deterministic_graph.add_edge(5, 6)

    def test_petersen_graph(self):
        """Test Petersen graph tree decomposition result"""
        G = nx.petersen_graph()
        _, decomp = treewidth_min_fill_in(G)
        is_tree_decomp(G, decomp)

    def test_small_tree_treewidth(self):
        """Test if the computed treewidth of the known self.small_tree is 2"""
        G = self.small_tree
        # the order of removal should be [1,2,4]3[5,6,7]
        # (with [] denoting any order of the containing nodes)
        # resulting in treewidth 2 for the heuristic
        treewidth, _ = treewidth_min_fill_in(G)
        assert treewidth == 2

    def test_heuristic_abort(self):
        """Test if min_fill_in returns None for fully connected graph"""
        graph = {}
        for u in self.complete:
            graph[u] = set()
            for v in self.complete[u]:
                if u != v:  # ignore self-loop
                    graph[u].add(v)
        next_node = min_fill_in_heuristic(graph)
        # Fixed the `if next_node is None: pass / else: assert False`
        # anti-idiom: assert the expected condition directly.
        assert next_node is None

    def test_empty_graph(self):
        """Test empty graph"""
        G = nx.Graph()
        _, _ = treewidth_min_fill_in(G)

    def test_two_component_graph(self):
        """Two isolated nodes must yield treewidth 0."""
        G = nx.Graph()
        G.add_node(1)
        G.add_node(2)
        treewidth, _ = treewidth_min_fill_in(G)
        assert treewidth == 0

    def test_not_sortable_nodes(self):
        """Smoke test: mixed (unorderable) node types must not raise."""
        G = nx.Graph([(0, "a")])
        treewidth_min_fill_in(G)

    def test_heuristic_first_steps(self):
        """Test first steps of min_fill_in heuristic"""
        graph = {
            n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
        }
        print(f"Graph {graph}:")
        elim_node = min_fill_in_heuristic(graph)
        steps = []

        while elim_node is not None:
            print(f"Removing {elim_node}:")
            steps.append(elim_node)
            nbrs = graph[elim_node]

            # Eliminate the node: turn its neighborhood into a clique ...
            for u, v in itertools.permutations(nbrs, 2):
                if v not in graph[u]:
                    graph[u].add(v)

            # ... then remove the node from every adjacency set.
            for u in graph:
                if elim_node in graph[u]:
                    graph[u].remove(elim_node)

            del graph[elim_node]
            print(f"Graph {graph}:")
            elim_node = min_fill_in_heuristic(graph)

        # check only the first 2 elements for equality
        assert steps[:2] == [6, 5]
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc ADDED
Binary file (8.61 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/modularity_max.py ADDED
@@ -0,0 +1,448 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for detecting communities based on modularity."""
2
+
3
+ from collections import defaultdict
4
+
5
+ import networkx as nx
6
+ from networkx.algorithms.community.quality import modularity
7
+ from networkx.utils import not_implemented_for
8
+ from networkx.utils.mapped_queue import MappedQueue
9
+
10
+ __all__ = [
11
+ "greedy_modularity_communities",
12
+ "naive_greedy_modularity_communities",
13
+ ]
14
+
15
+
16
def _greedy_modularity_communities_generator(G, weight=None, resolution=1):
    r"""Yield community partitions of G and the modularity change at each step.

    This function performs Clauset-Newman-Moore greedy modularity maximization [2]_
    At each step of the process it yields the change in modularity that will occur in
    the next step followed by yielding the new community partition after that step.

    Greedy modularity maximization begins with each node in its own community
    and repeatedly joins the pair of communities that lead to the largest
    modularity until one community contains all nodes (the partition has one set).

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    Yields
    ------
    Alternating yield statements produce the following two objects:

    communities: dict_values
        A dict_values of frozensets of nodes, one for each community.
        This represents a partition of the nodes of the graph into communities.
        The first yield is the partition with each node in its own community.

    dq: float
        The change in modularity when merging the next two communities
        that leads to the largest modularity.

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    directed = G.is_directed()
    N = G.number_of_nodes()

    # Count edges (or the sum of edge-weights for weighted graphs)
    m = G.size(weight)
    # NOTE(review): raises ZeroDivisionError for an edgeless graph — presumably
    # callers guard against that case; confirm before reusing this helper.
    q0 = 1 / m

    # Calculate degrees (notation from the papers)
    # a : the fraction of (weighted) out-degree for each node
    # b : the fraction of (weighted) in-degree for each node
    if directed:
        a = {node: deg_out * q0 for node, deg_out in G.out_degree(weight=weight)}
        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
    else:
        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}

    # this preliminary step collects the edge weights for each node pair
    # It handles multigraph and digraph and works fine for graph.
    dq_dict = defaultdict(lambda: defaultdict(float))
    for u, v, wt in G.edges(data=weight, default=1):
        if u == v:
            continue
        dq_dict[u][v] += wt
        dq_dict[v][u] += wt

    # now scale and subtract the expected edge-weights term
    for u, nbrdict in dq_dict.items():
        for v, wt in nbrdict.items():
            dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v])

    # Use -dq to get a max_heap instead of a min_heap
    # dq_heap holds a heap for each node's neighbors
    dq_heap = {u: MappedQueue({(u, v): -dq for v, dq in dq_dict[u].items()}) for u in G}
    # H -> all_dq_heap holds a heap with the best items for each node
    # Invariant maintained below: H contains the root (best entry) of every
    # nonempty per-row heap in dq_heap.
    H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0])

    # Initialize single-node communities
    communities = {n: frozenset([n]) for n in G}
    yield communities.values()

    # Merge the two communities that lead to the largest modularity
    while len(H) > 1:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            negdq, u, v = H.pop()
        except IndexError:
            break
        dq = -negdq
        yield dq
        # Remove best merge from row u heap
        dq_heap[u].pop()
        # Push new row max onto H
        if len(dq_heap[u]) > 0:
            H.push(dq_heap[u].heap[0])
        # If this element was also at the root of row v, we need to remove the
        # duplicate entry from H
        if dq_heap[v].heap[0] == (v, u):
            H.remove((v, u))
            # Remove best merge from row v heap
            dq_heap[v].remove((v, u))
            # Push new row max onto H
            if len(dq_heap[v]) > 0:
                H.push(dq_heap[v].heap[0])
        else:
            # Duplicate wasn't in H, just remove from row v heap
            dq_heap[v].remove((v, u))

        # Perform merge
        communities[v] = frozenset(communities[u] | communities[v])
        del communities[u]

        # Get neighbor communities connected to the merged communities
        u_nbrs = set(dq_dict[u])
        v_nbrs = set(dq_dict[v])
        all_nbrs = (u_nbrs | v_nbrs) - {u, v}
        both_nbrs = u_nbrs & v_nbrs
        # Update dq for merge of u into v
        for w in all_nbrs:
            # Calculate new dq value
            if w in both_nbrs:
                dq_vw = dq_dict[v][w] + dq_dict[u][w]
            elif w in v_nbrs:
                dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] + a[w] * b[u])
            else:  # w in u_nbrs
                dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] + a[w] * b[v])
            # Update rows v and w
            for row, col in [(v, w), (w, v)]:
                dq_heap_row = dq_heap[row]
                # Update dict for v,w only (u is removed below)
                dq_dict[row][col] = dq_vw
                # Save old max of per-row heap
                if len(dq_heap_row) > 0:
                    d_oldmax = dq_heap_row.heap[0]
                else:
                    d_oldmax = None
                # Add/update heaps
                d = (row, col)
                d_negdq = -dq_vw
                # Save old value for finding heap index
                if w in v_nbrs:
                    # Update existing element in per-row heap
                    dq_heap_row.update(d, d, priority=d_negdq)
                else:
                    # We're creating a new nonzero element, add to heap
                    dq_heap_row.push(d, priority=d_negdq)
                # Update heap of row maxes if necessary
                if d_oldmax is None:
                    # No entries previously in this row, push new max
                    H.push(d, priority=d_negdq)
                else:
                    # We've updated an entry in this row, has the max changed?
                    row_max = dq_heap_row.heap[0]
                    if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
                        H.update(d_oldmax, row_max)

        # Remove row/col u from dq_dict matrix
        for w in dq_dict[u]:
            # Remove from dict
            dq_old = dq_dict[w][u]
            del dq_dict[w][u]
            # Remove from heaps if we haven't already
            if w != v:
                # Remove both row and column
                for row, col in [(w, u), (u, w)]:
                    dq_heap_row = dq_heap[row]
                    # Check if replaced dq is row max
                    d_old = (row, col)
                    if dq_heap_row.heap[0] == d_old:
                        # Update per-row heap and heap of row maxes
                        dq_heap_row.remove(d_old)
                        H.remove(d_old)
                        # Update row max
                        if len(dq_heap_row) > 0:
                            H.push(dq_heap_row.heap[0])
                    else:
                        # Only update per-row heap
                        dq_heap_row.remove(d_old)

        del dq_dict[u]
        # Mark row u as deleted, but keep placeholder
        dq_heap[u] = MappedQueue()
        # Merge u into v and update a
        a[v] += a[u]
        a[u] = 0
        if directed:
            b[v] += b[u]
            b[u] = 0

    yield communities.values()
224
+
225
+
226
@nx._dispatch(edge_attrs="weight")
def greedy_modularity_communities(
    G,
    weight=None,
    resolution=1,
    cutoff=1,
    best_n=None,
):
    r"""Find communities in G using greedy modularity maximization.

    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_
    to find the community partition with the largest modularity.

    Greedy modularity maximization begins with each node in its own community
    and repeatedly joins the pair of communities that lead to the largest
    modularity until no further increase in modularity is possible (a maximum).
    Two keyword arguments adjust the stopping condition. `cutoff` is a lower
    limit on the number of communities so you can stop the process before
    reaching a maximum (used to save computation time). `best_n` is an upper
    limit on the number of communities so you can make the process continue
    until at most n communities remain even if the maximum modularity occurs
    for more. To obtain exactly n communities, set both `cutoff` and `best_n` to n.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float, optional (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    cutoff : int, optional (default=1)
        A minimum number of communities below which the merging process stops.
        The process stops at this number of communities even if modularity
        is not maximized. The goal is to let the user stop the process early.
        The process stops before the cutoff if it finds a maximum of modularity.

    best_n : int or None, optional (default=None)
        A maximum number of communities above which the merging process will
        not stop. This forces community merging to continue after modularity
        starts to decrease until `best_n` communities remain.
        If ``None``, don't force it to continue beyond a maximum.

    Raises
    ------
    ValueError : If the `cutoff` or `best_n` value is not in the range
        ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`.

    Returns
    -------
    communities: list
        A list of frozensets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> c = nx.community.greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    # Validate the stopping-condition parameters before any heavy work.
    if (cutoff < 1) or (cutoff > G.number_of_nodes()):
        raise ValueError(f"cutoff must be between 1 and {len(G)}. Got {cutoff}.")
    if best_n is not None:
        if (best_n < 1) or (best_n > G.number_of_nodes()):
            raise ValueError(f"best_n must be between 1 and {len(G)}. Got {best_n}.")
        if best_n < cutoff:
            raise ValueError(f"Must have best_n >= cutoff. Got {best_n} < {cutoff}")
        if best_n == 1:
            return [set(G)]
    else:
        best_n = G.number_of_nodes()

    # retrieve generator object to construct output
    # The generator alternates yields: partition, dq, partition, dq, ...
    community_gen = _greedy_modularity_communities_generator(
        G, weight=weight, resolution=resolution
    )

    # construct the first best community
    communities = next(community_gen)

    # continue merging communities until one of the breaking criteria is satisfied
    while len(communities) > cutoff:
        try:
            dq = next(community_gen)
        # StopIteration occurs when communities are the connected components
        except StopIteration:
            communities = sorted(communities, key=len, reverse=True)
            # if best_n requires more merging, merge big sets for highest modularity
            while len(communities) > best_n:
                comm1, comm2, *rest = communities
                # comm1 and comm2 come from a partition, so they are disjoint
                # and symmetric difference (^) equals their union here.
                communities = [comm1 ^ comm2]
                communities.extend(rest)
            return communities

        # keep going unless max_mod is reached or best_n says to merge more
        if dq < 0 and len(communities) <= best_n:
            break
        communities = next(community_gen)

    return sorted(communities, key=len, reverse=True)
352
+
353
+
354
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch(edge_attrs="weight")
def naive_greedy_modularity_communities(G, resolution=1, weight=None):
    r"""Find communities in G using greedy modularity maximization.

    This implementation is O(n^4), much slower than alternatives, but it is
    provided as an easy-to-understand reference implementation.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be simple and undirected.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    Returns
    -------
    list
        A list of sets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> c = nx.community.naive_greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    greedy_modularity_communities
    modularity
    """
    # First create one community for each node
    communities = [frozenset([u]) for u in G.nodes()]
    # Track merges
    merges = []
    # Greedily merge communities until no improvement is possible
    old_modularity = None
    new_modularity = modularity(G, communities, resolution=resolution, weight=weight)
    while old_modularity is None or new_modularity > old_modularity:
        # Save modularity for comparison
        old_modularity = new_modularity
        # Find best pair to merge
        trial_communities = list(communities)
        to_merge = None
        for i, u in enumerate(communities):
            for j, v in enumerate(communities):
                # Skip i==j and empty communities
                if j <= i or len(u) == 0 or len(v) == 0:
                    continue
                # Merge communities u and v
                # (merged communities are emptied in place, not removed,
                # so indexes i, j stay stable across iterations)
                trial_communities[j] = u | v
                trial_communities[i] = frozenset([])
                trial_modularity = modularity(
                    G, trial_communities, resolution=resolution, weight=weight
                )
                if trial_modularity >= new_modularity:
                    # Check if strictly better or tie
                    if trial_modularity > new_modularity:
                        # Found new best, save modularity and group indexes
                        new_modularity = trial_modularity
                        to_merge = (i, j, new_modularity - old_modularity)
                    elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]):
                        # Break ties by choosing pair with lowest min id
                        new_modularity = trial_modularity
                        to_merge = (i, j, new_modularity - old_modularity)
                # Un-merge
                trial_communities[i] = u
                trial_communities[j] = v
        if to_merge is not None:
            # If the best merge improves modularity, use it
            merges.append(to_merge)
            i, j, dq = to_merge
            u, v = communities[i], communities[j]
            communities[j] = u | v
            communities[i] = frozenset([])
    # Remove empty communities and sort
    return sorted((c for c in communities if len(c) > 0), key=len, reverse=True)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-311.pyc ADDED
Binary file (6.22 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-311.pyc ADDED
Binary file (13.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-311.pyc ADDED
Binary file (9.59 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc ADDED
Binary file (4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc ADDED
Binary file (13.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc ADDED
Binary file (14.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (237 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import NetworkXNotImplemented
5
+
6
+
7
class TestWeaklyConnected:
    """Tests comparing weak connectivity of digraphs with connectivity of
    their undirected views."""

    @classmethod
    def setup_class(cls):
        # cls.gc holds (digraph, strongly-connected-component list) pairs;
        # the component lists are unused here but kept for parity with the
        # strongly-connected test suite.
        cls.gc = []
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 8),
                (3, 4),
                (3, 7),
                (4, 5),
                (5, 3),
                (5, 6),
                (7, 4),
                (7, 6),
                (8, 1),
                (8, 7),
            ]
        )
        C = [[3, 4, 5, 7], [1, 2, 8], [6]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
        C = [[2, 3, 4], [1]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
        C = [[1, 2, 3]]
        cls.gc.append((G, C))

        # Eppstein's tests
        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
        C = [[0], [1], [2], [3], [4], [5], [6]]
        cls.gc.append((G, C))

        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
        C = [[0, 1, 2], [3, 4]]
        cls.gc.append((G, C))

    def test_weakly_connected_components(self):
        """Weak components of G equal connected components of its undirected view."""
        for G, C in self.gc:
            U = G.to_undirected()
            w = {frozenset(g) for g in nx.weakly_connected_components(G)}
            c = {frozenset(g) for g in nx.connected_components(U)}
            assert w == c

    def test_number_weakly_connected_components(self):
        """Component counts agree with the undirected view."""
        for G, C in self.gc:
            U = G.to_undirected()
            w = nx.number_weakly_connected_components(G)
            c = nx.number_connected_components(U)
            assert w == c

    def test_is_weakly_connected(self):
        """is_weakly_connected(G) matches is_connected of the undirected view."""
        for G, C in self.gc:
            U = G.to_undirected()
            assert nx.is_weakly_connected(G) == nx.is_connected(U)

    def test_null_graph(self):
        """The null graph has no components; connectivity is undefined for it."""
        G = nx.DiGraph()
        assert list(nx.weakly_connected_components(G)) == []
        assert nx.number_weakly_connected_components(G) == 0
        with pytest.raises(nx.NetworkXPointlessConcept):
            # Fixed: is_weakly_connected returns a bool, not an iterator; the
            # original wrapped it in next(), which only worked because the
            # exception fired during the call itself.
            nx.is_weakly_connected(G)

    def test_connected_raise(self):
        """Undirected input must raise NetworkXNotImplemented."""
        G = nx.Graph()
        with pytest.raises(NetworkXNotImplemented):
            next(nx.weakly_connected_components(G))
        pytest.raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G)
        pytest.raises(NetworkXNotImplemented, nx.is_weakly_connected, G)

    def test_connected_mutability(self):
        """Yielded component sets are fresh objects, not reused internally."""
        DG = nx.path_graph(5, create_using=nx.DiGraph)
        G = nx.disjoint_union(DG, DG)
        seen = set()
        for component in nx.weakly_connected_components(G):
            assert len(seen & component) == 0
            seen.update(component)
            component.clear()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/weakly_connected.py ADDED
@@ -0,0 +1,196 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Weakly connected components."""
2
+ import networkx as nx
3
+ from networkx.utils.decorators import not_implemented_for
4
+
5
+ __all__ = [
6
+ "number_weakly_connected_components",
7
+ "weakly_connected_components",
8
+ "is_weakly_connected",
9
+ ]
10
+
11
+
12
@not_implemented_for("undirected")
@nx._dispatch
def weakly_connected_components(G):
    """Generate the weakly connected components of G.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each weakly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of weakly connected components, largest first.

    >>> G = nx.path_graph(4, create_using=nx.DiGraph())
    >>> nx.add_path(G, [10, 11, 12])
    >>> [
    ...     len(c)
    ...     for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True)
    ... ]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort:

    >>> largest_cc = max(nx.weakly_connected_components(G), key=len)

    See Also
    --------
    connected_components
    strongly_connected_components

    Notes
    -----
    For directed graphs only.

    """
    # Each direction-blind BFS sweep discovers exactly one weak component;
    # nodes already visited belong to a component yielded earlier.
    visited = set()
    for node in G:
        if node in visited:
            continue
        component = set(_plain_bfs(G, node))
        visited |= component
        yield component
66
+
67
+
68
@not_implemented_for("undirected")
@nx._dispatch
def number_weakly_connected_components(G):
    """Returns the number of weakly connected components in G.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph.

    Returns
    -------
    n : integer
        Number of weakly connected components

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (2, 1), (3, 4)])
    >>> nx.number_weakly_connected_components(G)
    2

    See Also
    --------
    weakly_connected_components
    number_connected_components
    number_strongly_connected_components

    Notes
    -----
    For directed graphs only.

    """
    # Count lazily; there is no need to materialize the component sets.
    return sum(1 for _ in weakly_connected_components(G))
106
+
107
+
108
@not_implemented_for("undirected")
@nx._dispatch
def is_weakly_connected(G):
    """Test directed graph for weak connectivity.

    A directed graph is weakly connected if and only if the graph
    is connected when the direction of the edge between nodes is ignored.

    Note that if a graph is strongly connected (i.e. the graph is connected
    even when we account for directionality), it is by definition weakly
    connected as well.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    connected : bool
        True if the graph is weakly connected, False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (2, 1)])
    >>> G.add_node(3)
    >>> nx.is_weakly_connected(G)  # node 3 is not connected to the graph
    False
    >>> G.add_edge(2, 3)
    >>> nx.is_weakly_connected(G)
    True

    See Also
    --------
    is_strongly_connected
    is_semiconnected
    is_connected
    is_biconnected
    weakly_connected_components

    Notes
    -----
    For directed graphs only.

    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            """Connectivity is undefined for the null graph."""
        )

    # G is weakly connected iff the first component already spans all nodes.
    first_component = next(weakly_connected_components(G))
    return len(first_component) == len(G)
164
+
165
+
166
+ def _plain_bfs(G, source):
167
+ """A fast BFS node generator
168
+
169
+ The direction of the edge between nodes is ignored.
170
+
171
+ For directed graphs only.
172
+
173
+ """
174
+ n = len(G)
175
+ Gsucc = G._succ
176
+ Gpred = G._pred
177
+ seen = {source}
178
+ nextlevel = [source]
179
+
180
+ yield source
181
+ while nextlevel:
182
+ thislevel = nextlevel
183
+ nextlevel = []
184
+ for v in thislevel:
185
+ for w in Gsucc[v]:
186
+ if w not in seen:
187
+ seen.add(w)
188
+ nextlevel.append(w)
189
+ yield w
190
+ for w in Gpred[v]:
191
+ if w not in seen:
192
+ seen.add(w)
193
+ nextlevel.append(w)
194
+ yield w
195
+ if len(seen) == n:
196
+ return
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc ADDED
Binary file (35.5 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc ADDED
Binary file (18 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc ADDED
Binary file (16 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc ADDED
Binary file (11.7 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/ismags.py ADDED
@@ -0,0 +1,1169 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ISMAGS Algorithm
3
+ ================
4
+
5
+ Provides a Python implementation of the ISMAGS algorithm. [1]_
6
+
7
+ It is capable of finding (subgraph) isomorphisms between two graphs, taking the
8
+ symmetry of the subgraph into account. In most cases the VF2 algorithm is
9
+ faster (at least on small graphs) than this implementation, but in some cases
10
+ there is an exponential number of isomorphisms that are symmetrically
11
+ equivalent. In that case, the ISMAGS algorithm will provide only one solution
12
+ per symmetry group.
13
+
14
+ >>> petersen = nx.petersen_graph()
15
+ >>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
16
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
17
+ >>> len(isomorphisms)
18
+ 120
19
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
20
+ >>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
21
+ >>> answer == isomorphisms
22
+ True
23
+
24
+ In addition, this implementation also provides an interface to find the
25
+ largest common induced subgraph [2]_ between any two graphs, again taking
26
+ symmetry into account. Given `graph` and `subgraph` the algorithm will remove
27
+ nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
28
+ `graph`. Since only the symmetry of `subgraph` is taken into account it is
29
+ worth thinking about how you provide your graphs:
30
+
31
+ >>> graph1 = nx.path_graph(4)
32
+ >>> graph2 = nx.star_graph(3)
33
+ >>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
34
+ >>> ismags.is_isomorphic()
35
+ False
36
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph())
37
+ >>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
38
+ >>> answer == largest_common_subgraph
39
+ True
40
+ >>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
41
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
42
+ >>> answer = [
43
+ ... {1: 0, 0: 1, 2: 2},
44
+ ... {1: 0, 0: 1, 3: 2},
45
+ ... {2: 0, 0: 1, 1: 2},
46
+ ... {2: 0, 0: 1, 3: 2},
47
+ ... {3: 0, 0: 1, 1: 2},
48
+ ... {3: 0, 0: 1, 2: 2},
49
+ ... ]
50
+ >>> answer == largest_common_subgraph
51
+ True
52
+
53
+ However, when not taking symmetry into account, it doesn't matter:
54
+
55
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
56
+ >>> answer = [
57
+ ... {1: 0, 0: 1, 2: 2},
58
+ ... {1: 0, 2: 1, 0: 2},
59
+ ... {2: 0, 1: 1, 3: 2},
60
+ ... {2: 0, 3: 1, 1: 2},
61
+ ... {1: 0, 0: 1, 2: 3},
62
+ ... {1: 0, 2: 1, 0: 3},
63
+ ... {2: 0, 1: 1, 3: 3},
64
+ ... {2: 0, 3: 1, 1: 3},
65
+ ... {1: 0, 0: 2, 2: 3},
66
+ ... {1: 0, 2: 2, 0: 3},
67
+ ... {2: 0, 1: 2, 3: 3},
68
+ ... {2: 0, 3: 2, 1: 3},
69
+ ... ]
70
+ >>> answer == largest_common_subgraph
71
+ True
72
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
73
+ >>> answer = [
74
+ ... {1: 0, 0: 1, 2: 2},
75
+ ... {1: 0, 0: 1, 3: 2},
76
+ ... {2: 0, 0: 1, 1: 2},
77
+ ... {2: 0, 0: 1, 3: 2},
78
+ ... {3: 0, 0: 1, 1: 2},
79
+ ... {3: 0, 0: 1, 2: 2},
80
+ ... {1: 1, 0: 2, 2: 3},
81
+ ... {1: 1, 0: 2, 3: 3},
82
+ ... {2: 1, 0: 2, 1: 3},
83
+ ... {2: 1, 0: 2, 3: 3},
84
+ ... {3: 1, 0: 2, 1: 3},
85
+ ... {3: 1, 0: 2, 2: 3},
86
+ ... ]
87
+ >>> answer == largest_common_subgraph
88
+ True
89
+
90
+ Notes
91
+ -----
92
+ - The current implementation works for undirected graphs only. The algorithm
93
+ in general should work for directed graphs as well though.
94
+ - Node keys for both provided graphs need to be fully orderable as well as
95
+ hashable.
96
+ - Node and edge equality is assumed to be transitive: if A is equal to B, and
97
+ B is equal to C, then A is equal to C.
98
+
99
+ References
100
+ ----------
101
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
102
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
103
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
104
+ Enumeration", PLoS One 9(5): e97896, 2014.
105
+ https://doi.org/10.1371/journal.pone.0097896
106
+ .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
107
+ """
108
+
109
+ __all__ = ["ISMAGS"]
110
+
111
+ import itertools
112
+ from collections import Counter, defaultdict
113
+ from functools import reduce, wraps
114
+
115
+
116
def are_all_equal(iterable):
    """
    Returns ``True`` if and only if all elements in `iterable` are equal; and
    ``False`` otherwise.

    Parameters
    ----------
    iterable: collections.abc.Iterable
        The container whose elements will be checked.

    Returns
    -------
    bool
        ``True`` iff all elements in `iterable` compare equal, ``False``
        otherwise.  An empty iterable yields ``True``.

    Raises
    ------
    NotImplementedError
        If `iterable` has a multidimensional ``shape`` attribute (e.g. a
        2-D numpy array), since element-wise comparison would be ambiguous.
    """
    try:
        shape = iterable.shape
    except AttributeError:
        pass
    else:
        if len(shape) > 1:
            # Fixed grammar in the error message ("does not works" -> "does not work").
            message = "The function does not work on multidimensional arrays."
            raise NotImplementedError(message) from None

    iterator = iter(iterable)
    first = next(iterator, None)
    return all(item == first for item in iterator)
144
+
145
+
146
def make_partitions(items, test):
    """
    Partitions items into sets based on the outcome of ``test(item1, item2)``.
    Pairs of items for which `test` returns `True` end up in the same set.

    Parameters
    ----------
    items : collections.abc.Iterable[collections.abc.Hashable]
        Items to partition
    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
        A function that will be called with 2 arguments, taken from items.
        Should return `True` if those 2 items need to end up in the same
        partition, and `False` otherwise.

    Returns
    -------
    list[set]
        A list of sets, with each set containing part of the items in `items`,
        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
        == True``

    Notes
    -----
    The function `test` is assumed to be transitive: if ``test(a, b)`` and
    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
    """
    partitions = []
    for item in items:
        # Transitivity lets us compare against an arbitrary representative
        # of each existing partition; take the first match in order.
        match = next(
            (part for part in partitions if test(item, next(iter(part)))), None
        )
        if match is None:
            partitions.append({item})
        else:
            match.add(item)
    return partitions
182
+
183
+
184
def partition_to_color(partitions):
    """
    Creates a dictionary that maps each item in each partition to the index of
    the partition to which it belongs.

    Parameters
    ----------
    partitions: collections.abc.Sequence[collections.abc.Iterable]
        As returned by :func:`make_partitions`.

    Returns
    -------
    dict
        Mapping of ``{item: partition index}``.
    """
    # The partition's position in the sequence serves as its "color".
    return {item: color for color, part in enumerate(partitions) for item in part}
203
+
204
+
205
def intersect(collection_of_sets):
    """
    Given an collection of sets, returns the intersection of those sets.

    Parameters
    ----------
    collection_of_sets: collections.abc.Collection[set]
        A collection of sets.

    Returns
    -------
    set
        An intersection of all sets in `collection_of_sets`. Will have the same
        type as the item initially taken from `collection_of_sets`.
    """
    remaining = list(collection_of_sets)
    # Pop the last element: its type determines the type of the result.
    last = remaining.pop()
    common = set(last)
    for other in remaining:
        common &= other
    return type(last)(common)
224
+
225
+
226
+ class ISMAGS:
227
+ """
228
+ Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
229
+ "Index-based Subgraph Matching Algorithm with General Symmetries". As the
230
+ name implies, it is symmetry aware and will only generate non-symmetric
231
+ isomorphisms.
232
+
233
+ Notes
234
+ -----
235
+ The implementation imposes additional conditions compared to the VF2
236
+ algorithm on the graphs provided and the comparison functions
237
+ (:attr:`node_equality` and :attr:`edge_equality`):
238
+
239
+ - Node keys in both graphs must be orderable as well as hashable.
240
+ - Equality must be transitive: if A is equal to B, and B is equal to C,
241
+ then A must be equal to C.
242
+
243
+ Attributes
244
+ ----------
245
+ graph: networkx.Graph
246
+ subgraph: networkx.Graph
247
+ node_equality: collections.abc.Callable
248
+ The function called to see if two nodes should be considered equal.
249
+ It's signature looks like this:
250
+ ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
251
+ `node1` is a node in `graph1`, and `node2` a node in `graph2`.
252
+ Constructed from the argument `node_match`.
253
+ edge_equality: collections.abc.Callable
254
+ The function called to see if two edges should be considered equal.
255
+ It's signature looks like this:
256
+ ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
257
+ `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
258
+ Constructed from the argument `edge_match`.
259
+
260
+ References
261
+ ----------
262
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
263
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
264
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
265
+ Enumeration", PLoS One 9(5): e97896, 2014.
266
+ https://doi.org/10.1371/journal.pone.0097896
267
+ """
268
+
269
+ def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
270
+ """
271
+ Parameters
272
+ ----------
273
+ graph: networkx.Graph
274
+ subgraph: networkx.Graph
275
+ node_match: collections.abc.Callable or None
276
+ Function used to determine whether two nodes are equivalent. Its
277
+ signature should look like ``f(n1: dict, n2: dict) -> bool``, with
278
+ `n1` and `n2` node property dicts. See also
279
+ :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
280
+ friends.
281
+ If `None`, all nodes are considered equal.
282
+ edge_match: collections.abc.Callable or None
283
+ Function used to determine whether two edges are equivalent. Its
284
+ signature should look like ``f(e1: dict, e2: dict) -> bool``, with
285
+ `e1` and `e2` edge property dicts. See also
286
+ :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
287
+ friends.
288
+ If `None`, all edges are considered equal.
289
+ cache: collections.abc.Mapping
290
+ A cache used for caching graph symmetries.
291
+ """
292
+ # TODO: graph and subgraph setter methods that invalidate the caches.
293
+ # TODO: allow for precomputed partitions and colors
294
+ self.graph = graph
295
+ self.subgraph = subgraph
296
+ self._symmetry_cache = cache
297
+ # Naming conventions are taken from the original paper. For your
298
+ # sanity:
299
+ # sg: subgraph
300
+ # g: graph
301
+ # e: edge(s)
302
+ # n: node(s)
303
+ # So: sgn means "subgraph nodes".
304
+ self._sgn_partitions_ = None
305
+ self._sge_partitions_ = None
306
+
307
+ self._sgn_colors_ = None
308
+ self._sge_colors_ = None
309
+
310
+ self._gn_partitions_ = None
311
+ self._ge_partitions_ = None
312
+
313
+ self._gn_colors_ = None
314
+ self._ge_colors_ = None
315
+
316
+ self._node_compat_ = None
317
+ self._edge_compat_ = None
318
+
319
+ if node_match is None:
320
+ self.node_equality = self._node_match_maker(lambda n1, n2: True)
321
+ self._sgn_partitions_ = [set(self.subgraph.nodes)]
322
+ self._gn_partitions_ = [set(self.graph.nodes)]
323
+ self._node_compat_ = {0: 0}
324
+ else:
325
+ self.node_equality = self._node_match_maker(node_match)
326
+ if edge_match is None:
327
+ self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
328
+ self._sge_partitions_ = [set(self.subgraph.edges)]
329
+ self._ge_partitions_ = [set(self.graph.edges)]
330
+ self._edge_compat_ = {0: 0}
331
+ else:
332
+ self.edge_equality = self._edge_match_maker(edge_match)
333
+
334
+ @property
335
+ def _sgn_partitions(self):
336
+ if self._sgn_partitions_ is None:
337
+
338
+ def nodematch(node1, node2):
339
+ return self.node_equality(self.subgraph, node1, self.subgraph, node2)
340
+
341
+ self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
342
+ return self._sgn_partitions_
343
+
344
+ @property
345
+ def _sge_partitions(self):
346
+ if self._sge_partitions_ is None:
347
+
348
+ def edgematch(edge1, edge2):
349
+ return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)
350
+
351
+ self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
352
+ return self._sge_partitions_
353
+
354
+ @property
355
+ def _gn_partitions(self):
356
+ if self._gn_partitions_ is None:
357
+
358
+ def nodematch(node1, node2):
359
+ return self.node_equality(self.graph, node1, self.graph, node2)
360
+
361
+ self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
362
+ return self._gn_partitions_
363
+
364
+ @property
365
+ def _ge_partitions(self):
366
+ if self._ge_partitions_ is None:
367
+
368
+ def edgematch(edge1, edge2):
369
+ return self.edge_equality(self.graph, edge1, self.graph, edge2)
370
+
371
+ self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
372
+ return self._ge_partitions_
373
+
374
+ @property
375
+ def _sgn_colors(self):
376
+ if self._sgn_colors_ is None:
377
+ self._sgn_colors_ = partition_to_color(self._sgn_partitions)
378
+ return self._sgn_colors_
379
+
380
+ @property
381
+ def _sge_colors(self):
382
+ if self._sge_colors_ is None:
383
+ self._sge_colors_ = partition_to_color(self._sge_partitions)
384
+ return self._sge_colors_
385
+
386
+ @property
387
+ def _gn_colors(self):
388
+ if self._gn_colors_ is None:
389
+ self._gn_colors_ = partition_to_color(self._gn_partitions)
390
+ return self._gn_colors_
391
+
392
+ @property
393
+ def _ge_colors(self):
394
+ if self._ge_colors_ is None:
395
+ self._ge_colors_ = partition_to_color(self._ge_partitions)
396
+ return self._ge_colors_
397
+
398
+ @property
399
+ def _node_compatibility(self):
400
+ if self._node_compat_ is not None:
401
+ return self._node_compat_
402
+ self._node_compat_ = {}
403
+ for sgn_part_color, gn_part_color in itertools.product(
404
+ range(len(self._sgn_partitions)), range(len(self._gn_partitions))
405
+ ):
406
+ sgn = next(iter(self._sgn_partitions[sgn_part_color]))
407
+ gn = next(iter(self._gn_partitions[gn_part_color]))
408
+ if self.node_equality(self.subgraph, sgn, self.graph, gn):
409
+ self._node_compat_[sgn_part_color] = gn_part_color
410
+ return self._node_compat_
411
+
412
+ @property
413
+ def _edge_compatibility(self):
414
+ if self._edge_compat_ is not None:
415
+ return self._edge_compat_
416
+ self._edge_compat_ = {}
417
+ for sge_part_color, ge_part_color in itertools.product(
418
+ range(len(self._sge_partitions)), range(len(self._ge_partitions))
419
+ ):
420
+ sge = next(iter(self._sge_partitions[sge_part_color]))
421
+ ge = next(iter(self._ge_partitions[ge_part_color]))
422
+ if self.edge_equality(self.subgraph, sge, self.graph, ge):
423
+ self._edge_compat_[sge_part_color] = ge_part_color
424
+ return self._edge_compat_
425
+
426
+ @staticmethod
427
+ def _node_match_maker(cmp):
428
+ @wraps(cmp)
429
+ def comparer(graph1, node1, graph2, node2):
430
+ return cmp(graph1.nodes[node1], graph2.nodes[node2])
431
+
432
+ return comparer
433
+
434
+ @staticmethod
435
+ def _edge_match_maker(cmp):
436
+ @wraps(cmp)
437
+ def comparer(graph1, edge1, graph2, edge2):
438
+ return cmp(graph1.edges[edge1], graph2.edges[edge2])
439
+
440
+ return comparer
441
+
442
+ def find_isomorphisms(self, symmetry=True):
443
+ """Find all subgraph isomorphisms between subgraph and graph
444
+
445
+ Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.
446
+
447
+ Parameters
448
+ ----------
449
+ symmetry: bool
450
+ Whether symmetry should be taken into account. If False, found
451
+ isomorphisms may be symmetrically equivalent.
452
+
453
+ Yields
454
+ ------
455
+ dict
456
+ The found isomorphism mappings of {graph_node: subgraph_node}.
457
+ """
458
+ # The networkx VF2 algorithm is slightly funny in when it yields an
459
+ # empty dict and when not.
460
+ if not self.subgraph:
461
+ yield {}
462
+ return
463
+ elif not self.graph:
464
+ return
465
+ elif len(self.graph) < len(self.subgraph):
466
+ return
467
+
468
+ if symmetry:
469
+ _, cosets = self.analyze_symmetry(
470
+ self.subgraph, self._sgn_partitions, self._sge_colors
471
+ )
472
+ constraints = self._make_constraints(cosets)
473
+ else:
474
+ constraints = []
475
+
476
+ candidates = self._find_nodecolor_candidates()
477
+ la_candidates = self._get_lookahead_candidates()
478
+ for sgn in self.subgraph:
479
+ extra_candidates = la_candidates[sgn]
480
+ if extra_candidates:
481
+ candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}
482
+
483
+ if any(candidates.values()):
484
+ start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
485
+ candidates[start_sgn] = (intersect(candidates[start_sgn]),)
486
+ yield from self._map_nodes(start_sgn, candidates, constraints)
487
+ else:
488
+ return
489
+
490
+ @staticmethod
491
+ def _find_neighbor_color_count(graph, node, node_color, edge_color):
492
+ """
493
+ For `node` in `graph`, count the number of edges of a specific color
494
+ it has to nodes of a specific color.
495
+ """
496
+ counts = Counter()
497
+ neighbors = graph[node]
498
+ for neighbor in neighbors:
499
+ n_color = node_color[neighbor]
500
+ if (node, neighbor) in edge_color:
501
+ e_color = edge_color[node, neighbor]
502
+ else:
503
+ e_color = edge_color[neighbor, node]
504
+ counts[e_color, n_color] += 1
505
+ return counts
506
+
507
+ def _get_lookahead_candidates(self):
508
+ """
509
+ Returns a mapping of {subgraph node: collection of graph nodes} for
510
+ which the graph nodes are feasible candidates for the subgraph node, as
511
+ determined by looking ahead one edge.
512
+ """
513
+ g_counts = {}
514
+ for gn in self.graph:
515
+ g_counts[gn] = self._find_neighbor_color_count(
516
+ self.graph, gn, self._gn_colors, self._ge_colors
517
+ )
518
+ candidates = defaultdict(set)
519
+ for sgn in self.subgraph:
520
+ sg_count = self._find_neighbor_color_count(
521
+ self.subgraph, sgn, self._sgn_colors, self._sge_colors
522
+ )
523
+ new_sg_count = Counter()
524
+ for (sge_color, sgn_color), count in sg_count.items():
525
+ try:
526
+ ge_color = self._edge_compatibility[sge_color]
527
+ gn_color = self._node_compatibility[sgn_color]
528
+ except KeyError:
529
+ pass
530
+ else:
531
+ new_sg_count[ge_color, gn_color] = count
532
+
533
+ for gn, g_count in g_counts.items():
534
+ if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
535
+ # Valid candidate
536
+ candidates[sgn].add(gn)
537
+ return candidates
538
+
539
+ def largest_common_subgraph(self, symmetry=True):
540
+ """
541
+ Find the largest common induced subgraphs between :attr:`subgraph` and
542
+ :attr:`graph`.
543
+
544
+ Parameters
545
+ ----------
546
+ symmetry: bool
547
+ Whether symmetry should be taken into account. If False, found
548
+ largest common subgraphs may be symmetrically equivalent.
549
+
550
+ Yields
551
+ ------
552
+ dict
553
+ The found isomorphism mappings of {graph_node: subgraph_node}.
554
+ """
555
+ # The networkx VF2 algorithm is slightly funny in when it yields an
556
+ # empty dict and when not.
557
+ if not self.subgraph:
558
+ yield {}
559
+ return
560
+ elif not self.graph:
561
+ return
562
+
563
+ if symmetry:
564
+ _, cosets = self.analyze_symmetry(
565
+ self.subgraph, self._sgn_partitions, self._sge_colors
566
+ )
567
+ constraints = self._make_constraints(cosets)
568
+ else:
569
+ constraints = []
570
+
571
+ candidates = self._find_nodecolor_candidates()
572
+
573
+ if any(candidates.values()):
574
+ yield from self._largest_common_subgraph(candidates, constraints)
575
+ else:
576
+ return
577
+
578
+ def analyze_symmetry(self, graph, node_partitions, edge_colors):
579
+ """
580
+ Find a minimal set of permutations and corresponding co-sets that
581
+ describe the symmetry of `graph`, given the node and edge equalities
582
+ given by `node_partitions` and `edge_colors`, respectively.
583
+
584
+ Parameters
585
+ ----------
586
+ graph : networkx.Graph
587
+ The graph whose symmetry should be analyzed.
588
+ node_partitions : list of sets
589
+ A list of sets containing node keys. Node keys in the same set
590
+ are considered equivalent. Every node key in `graph` should be in
591
+ exactly one of the sets. If all nodes are equivalent, this should
592
+ be ``[set(graph.nodes)]``.
593
+ edge_colors : dict mapping edges to their colors
594
+ A dict mapping every edge in `graph` to its corresponding color.
595
+ Edges with the same color are considered equivalent. If all edges
596
+ are equivalent, this should be ``{e: 0 for e in graph.edges}``.
597
+
598
+
599
+ Returns
600
+ -------
601
+ set[frozenset]
602
+ The found permutations. This is a set of frozensets of pairs of node
603
+ keys which can be exchanged without changing :attr:`subgraph`.
604
+ dict[collections.abc.Hashable, set[collections.abc.Hashable]]
605
+ The found co-sets. The co-sets is a dictionary of
606
+ ``{node key: set of node keys}``.
607
+ Every key-value pair describes which ``values`` can be interchanged
608
+ without changing nodes less than ``key``.
609
+ """
610
+ if self._symmetry_cache is not None:
611
+ key = hash(
612
+ (
613
+ tuple(graph.nodes),
614
+ tuple(graph.edges),
615
+ tuple(map(tuple, node_partitions)),
616
+ tuple(edge_colors.items()),
617
+ )
618
+ )
619
+ if key in self._symmetry_cache:
620
+ return self._symmetry_cache[key]
621
+ node_partitions = list(
622
+ self._refine_node_partitions(graph, node_partitions, edge_colors)
623
+ )
624
+ assert len(node_partitions) == 1
625
+ node_partitions = node_partitions[0]
626
+ permutations, cosets = self._process_ordered_pair_partitions(
627
+ graph, node_partitions, node_partitions, edge_colors
628
+ )
629
+ if self._symmetry_cache is not None:
630
+ self._symmetry_cache[key] = permutations, cosets
631
+ return permutations, cosets
632
+
633
+ def is_isomorphic(self, symmetry=False):
634
+ """
635
+ Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
636
+ False otherwise.
637
+
638
+ Returns
639
+ -------
640
+ bool
641
+ """
642
+ return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
643
+ symmetry
644
+ )
645
+
646
+ def subgraph_is_isomorphic(self, symmetry=False):
647
+ """
648
+ Returns True if a subgraph of :attr:`graph` is isomorphic to
649
+ :attr:`subgraph` and False otherwise.
650
+
651
+ Returns
652
+ -------
653
+ bool
654
+ """
655
+ # symmetry=False, since we only need to know whether there is any
656
+ # example; figuring out all symmetry elements probably costs more time
657
+ # than it gains.
658
+ isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
659
+ return isom is not None
660
+
661
+ def isomorphisms_iter(self, symmetry=True):
662
+ """
663
+ Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
664
+ :attr:`subgraph` have the same number of nodes.
665
+ """
666
+ if len(self.graph) == len(self.subgraph):
667
+ yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
668
+
669
+ def subgraph_isomorphisms_iter(self, symmetry=True):
670
+ """Alternative name for :meth:`find_isomorphisms`."""
671
+ return self.find_isomorphisms(symmetry)
672
+
673
def _find_nodecolor_candidates(self):
    """
    Per node in subgraph find all nodes in graph that have the same color.
    """
    candidates = {}
    for sgn in self.subgraph.nodes:
        sgn_color = self._sgn_colors[sgn]
        if sgn_color in self._node_compatibility:
            gn_color = self._node_compatibility[sgn_color]
            option = frozenset(self._gn_partitions[gn_color])
        else:
            # No compatible node color exists in the big graph.
            option = frozenset()
        # Values are frozensets of frozensets of graph nodes, matching the
        # shape _map_nodes later intersects.
        candidates[sgn] = frozenset([option])
    return candidates
689
+
690
+ @staticmethod
691
+ def _make_constraints(cosets):
692
+ """
693
+ Turn cosets into constraints.
694
+ """
695
+ constraints = []
696
+ for node_i, node_ts in cosets.items():
697
+ for node_t in node_ts:
698
+ if node_i != node_t:
699
+ # Node i must be smaller than node t.
700
+ constraints.append((node_i, node_t))
701
+ return constraints
702
+
703
+ @staticmethod
704
+ def _find_node_edge_color(graph, node_colors, edge_colors):
705
+ """
706
+ For every node in graph, come up with a color that combines 1) the
707
+ color of the node, and 2) the number of edges of a color to each type
708
+ of node.
709
+ """
710
+ counts = defaultdict(lambda: defaultdict(int))
711
+ for node1, node2 in graph.edges:
712
+ if (node1, node2) in edge_colors:
713
+ # FIXME directed graphs
714
+ ecolor = edge_colors[node1, node2]
715
+ else:
716
+ ecolor = edge_colors[node2, node1]
717
+ # Count per node how many edges it has of what color to nodes of
718
+ # what color
719
+ counts[node1][ecolor, node_colors[node2]] += 1
720
+ counts[node2][ecolor, node_colors[node1]] += 1
721
+
722
+ node_edge_colors = {}
723
+ for node in graph.nodes:
724
+ node_edge_colors[node] = node_colors[node], set(counts[node].items())
725
+
726
+ return node_edge_colors
727
+
728
+ @staticmethod
729
+ def _get_permutations_by_length(items):
730
+ """
731
+ Get all permutations of items, but only permute items with the same
732
+ length.
733
+
734
+ >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
735
+ >>> answer = [
736
+ ... (([1], [2]), ([3, 4], [4, 5])),
737
+ ... (([1], [2]), ([4, 5], [3, 4])),
738
+ ... (([2], [1]), ([3, 4], [4, 5])),
739
+ ... (([2], [1]), ([4, 5], [3, 4])),
740
+ ... ]
741
+ >>> found == answer
742
+ True
743
+ """
744
+ by_len = defaultdict(list)
745
+ for item in items:
746
+ by_len[len(item)].append(item)
747
+
748
+ yield from itertools.product(
749
+ *(itertools.permutations(by_len[l]) for l in sorted(by_len))
750
+ )
751
+
752
@classmethod
def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
    """
    Given a partition of nodes in graph, make the partitions smaller such
    that all nodes in a partition have 1) the same color, and 2) the same
    number of edges to specific other partitions.

    Yields refined partition lists; more than one list is produced only
    when ``branch`` is True and cell ordering is ambiguous.
    """

    def equal_color(node1, node2):
        return node_edge_colors[node1] == node_edge_colors[node2]

    node_partitions = list(node_partitions)
    node_colors = partition_to_color(node_partitions)
    node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
    # Fixed point: every cell is already homogeneous w.r.t. node+edge color.
    if all(
        are_all_equal(node_edge_colors[node] for node in partition)
        for partition in node_partitions
    ):
        yield node_partitions
        return

    new_partitions = []
    output = [new_partitions]
    for partition in node_partitions:
        if not are_all_equal(node_edge_colors[node] for node in partition):
            refined = make_partitions(partition, equal_color)
            if (
                branch
                and len(refined) != 1
                and len({len(r) for r in refined}) != len([len(r) for r in refined])
            ):
                # This is where it breaks. There are multiple new cells
                # in refined with the same length, and their order
                # matters.
                # So option 1) Hit it with a big hammer and simply make all
                # orderings.
                # NOTE(review): permutations is a generator; if output ever
                # holds more than one entry it is exhausted after the first
                # pass. Also only permutation[0] (the first length-group) is
                # appended here — confirm both against upstream intent.
                permutations = cls._get_permutations_by_length(refined)
                new_output = []
                for n_p in output:
                    for permutation in permutations:
                        new_output.append(n_p + list(permutation[0]))
                output = new_output
            else:
                # Deterministic order: smaller cells first.
                for n_p in output:
                    n_p.extend(sorted(refined, key=len))
        else:
            # Cell already homogeneous; keep it as-is in every branch.
            for n_p in output:
                n_p.append(partition)
    for n_p in output:
        # Keep refining recursively until the fixed point above is reached.
        yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
802
+
803
def _edges_of_same_color(self, sgn1, sgn2):
    """
    Returns all edges in :attr:`graph` that have the same colour as the
    edge between sgn1 and sgn2 in :attr:`subgraph`.
    """
    # Subgraph edge colors may be stored under either node order.
    # FIXME directed graphs
    key = (sgn1, sgn2)
    if key not in self._sge_colors:
        key = (sgn2, sgn1)
    sge_color = self._sge_colors[key]
    if sge_color not in self._edge_compatibility:
        # No compatible edge color exists in the big graph.
        return []
    ge_color = self._edge_compatibility[sge_color]
    return self._ge_partitions[ge_color]
819
+
820
def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
    """
    Find all subgraph isomorphisms honoring constraints.

    Parameters
    ----------
    sgn
        The subgraph node to assign next.
    candidates : dict
        Maps each subgraph node to a frozenset of frozensets of graph
        nodes it may still be assigned to.
    constraints : list of tuple
        Symmetry-breaking (smaller, larger) subgraph-node pairs.
    mapping : dict or None
        Partial subgraph-node -> graph-node assignment built so far.
    to_be_mapped : set or None
        Subgraph nodes that must all end up in the mapping.

    Yields
    ------
    dict
        Completed graph-node -> subgraph-node mappings.
    """
    if mapping is None:
        mapping = {}
    else:
        # Copy so recursive siblings don't see each other's assignments.
        mapping = mapping.copy()
    if to_be_mapped is None:
        to_be_mapped = set(self.subgraph.nodes)

    # Note, we modify candidates here. Doesn't seem to affect results, but
    # remember this.
    # candidates = candidates.copy()
    sgn_candidates = intersect(candidates[sgn])
    candidates[sgn] = frozenset([sgn_candidates])
    for gn in sgn_candidates:
        # We're going to try to map sgn to gn.
        if gn in mapping.values() or sgn not in to_be_mapped:
            # gn is already mapped to something
            continue  # pragma: no cover

        # REDUCTION and COMBINATION
        mapping[sgn] = gn
        # BASECASE
        if to_be_mapped == set(mapping.keys()):
            yield {v: k for k, v in mapping.items()}
            continue
        left_to_map = to_be_mapped - set(mapping.keys())

        new_candidates = candidates.copy()
        sgn_neighbours = set(self.subgraph[sgn])
        not_gn_neighbours = set(self.graph.nodes) - set(self.graph[gn])
        for sgn2 in left_to_map:
            if sgn2 not in sgn_neighbours:
                # Non-neighbours of sgn must map to non-neighbours of gn.
                gn2_options = not_gn_neighbours
            else:
                # Get all edges to gn of the right color:
                g_edges = self._edges_of_same_color(sgn, sgn2)
                # FIXME directed graphs
                # And all nodes involved in those which are connected to gn
                gn2_options = {n for e in g_edges for n in e if gn in e}
                # Node color compatibility should be taken care of by the
                # initial candidate lists made by find_subgraphs

            # Add gn2_options to the right collection. Since new_candidates
            # is a dict of frozensets of frozensets of node indices it's
            # a bit clunky. We can't do .add, and + also doesn't work. We
            # could do |, but I deem union to be clearer.
            new_candidates[sgn2] = new_candidates[sgn2].union(
                [frozenset(gn2_options)]
            )

            # Apply symmetry-breaking ordering constraints, if any.
            if (sgn, sgn2) in constraints:
                gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
            elif (sgn2, sgn) in constraints:
                gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
            else:
                continue  # pragma: no cover
            new_candidates[sgn2] = new_candidates[sgn2].union(
                [frozenset(gn2_options)]
            )

        # The next node is the one that is unmapped and has fewest
        # candidates
        # Pylint disables because it's a one-shot function.
        next_sgn = min(
            left_to_map, key=lambda n: min(new_candidates[n], key=len)
        )  # pylint: disable=cell-var-from-loop
        yield from self._map_nodes(
            next_sgn,
            new_candidates,
            constraints,
            mapping=mapping,
            to_be_mapped=to_be_mapped,
        )
    # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
    # when making a new mapping for sgn.
    # del mapping[sgn]
899
+
900
def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
    """
    Find all largest common subgraphs honoring constraints.

    Parameters
    ----------
    candidates : dict
        Maps each subgraph node to its possible graph-node assignments.
    constraints : list of tuple
        Symmetry-breaking (smaller, larger) subgraph-node pairs.
    to_be_mapped : set of frozensets or None
        Candidate subgraph-node subsets of the current size to try.

    Yields
    ------
    dict
        Graph-node -> subgraph-node mappings for the largest common
        subgraphs found.
    """
    if to_be_mapped is None:
        to_be_mapped = {frozenset(self.subgraph.nodes)}

    # The LCS problem is basically a repeated subgraph isomorphism problem
    # with smaller and smaller subgraphs. We store the nodes that are
    # "part of" the subgraph in to_be_mapped, and we make it a little
    # smaller every iteration.

    # pylint disable because it's guarded against by default value
    current_size = len(
        next(iter(to_be_mapped), [])
    )  # pylint: disable=stop-iteration-return

    found_iso = False
    if current_size <= len(self.graph):
        # There's no point in trying to find isomorphisms of
        # graph >= subgraph if subgraph has more nodes than graph.

        # Try the isomorphism first with the nodes with lowest ID. So sort
        # them. Those are more likely to be part of the final
        # correspondence. This makes finding the first answer(s) faster. In
        # theory.
        for nodes in sorted(to_be_mapped, key=sorted):
            # Find the isomorphism between subgraph[to_be_mapped] <= graph
            next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
            isomorphs = self._map_nodes(
                next_sgn, candidates, constraints, to_be_mapped=nodes
            )

            # This is effectively `yield from isomorphs`, except that we look
            # whether an item was yielded.
            try:
                item = next(isomorphs)
            except StopIteration:
                pass
            else:
                yield item
                yield from isomorphs
                found_iso = True

    # BASECASE
    if found_iso or current_size == 1:
        # Shrinking has no point because either 1) we end up with a smaller
        # common subgraph (and we want the largest), or 2) there'll be no
        # more subgraph.
        return

    left_to_be_mapped = set()
    for nodes in to_be_mapped:
        for sgn in nodes:
            # We're going to remove sgn from to_be_mapped, but subject to
            # symmetry constraints. We know that for every constraint we
            # have those subgraph nodes are equal. So whenever we would
            # remove the lower part of a constraint, remove the higher
            # instead. This is all dealt with by _remove_node. And because
            # left_to_be_mapped is a set, we don't do double work.

            # And finally, make the subgraph one node smaller.
            # REDUCTION
            new_nodes = self._remove_node(sgn, nodes, constraints)
            left_to_be_mapped.add(new_nodes)
    # COMBINATION
    yield from self._largest_common_subgraph(
        candidates, constraints, to_be_mapped=left_to_be_mapped
    )
969
+
970
+ @staticmethod
971
+ def _remove_node(node, nodes, constraints):
972
+ """
973
+ Returns a new set where node has been removed from nodes, subject to
974
+ symmetry constraints. We know, that for every constraint we have
975
+ those subgraph nodes are equal. So whenever we would remove the
976
+ lower part of a constraint, remove the higher instead.
977
+ """
978
+ while True:
979
+ for low, high in constraints:
980
+ if low == node and high in nodes:
981
+ node = high
982
+ break
983
+ else: # no break, couldn't find node in constraints
984
+ break
985
+ return frozenset(nodes - {node})
986
+
987
+ @staticmethod
988
+ def _find_permutations(top_partitions, bottom_partitions):
989
+ """
990
+ Return the pairs of top/bottom partitions where the partitions are
991
+ different. Ensures that all partitions in both top and bottom
992
+ partitions have size 1.
993
+ """
994
+ # Find permutations
995
+ permutations = set()
996
+ for top, bot in zip(top_partitions, bottom_partitions):
997
+ # top and bot have only one element
998
+ if len(top) != 1 or len(bot) != 1:
999
+ raise IndexError(
1000
+ "Not all nodes are coupled. This is"
1001
+ f" impossible: {top_partitions}, {bottom_partitions}"
1002
+ )
1003
+ if top != bot:
1004
+ permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
1005
+ return permutations
1006
+
1007
+ @staticmethod
1008
+ def _update_orbits(orbits, permutations):
1009
+ """
1010
+ Update orbits based on permutations. Orbits is modified in place.
1011
+ For every pair of items in permutations their respective orbits are
1012
+ merged.
1013
+ """
1014
+ for permutation in permutations:
1015
+ node, node2 = permutation
1016
+ # Find the orbits that contain node and node2, and replace the
1017
+ # orbit containing node with the union
1018
+ first = second = None
1019
+ for idx, orbit in enumerate(orbits):
1020
+ if first is not None and second is not None:
1021
+ break
1022
+ if node in orbit:
1023
+ first = idx
1024
+ if node2 in orbit:
1025
+ second = idx
1026
+ if first != second:
1027
+ orbits[first].update(orbits[second])
1028
+ del orbits[second]
1029
+
1030
def _couple_nodes(
    self,
    top_partitions,
    bottom_partitions,
    pair_idx,
    t_node,
    b_node,
    graph,
    edge_colors,
):
    """
    Generate new partitions from top and bottom_partitions where t_node is
    coupled to b_node. pair_idx is the index of the partitions where t_ and
    b_node can be found.

    Yields
    ------
    tuple
        (new_top_partitions, new_bottom_partitions) pairs, one per refined
        ordering of the bottom partitions.
    """
    t_partition = top_partitions[pair_idx]
    b_partition = bottom_partitions[pair_idx]
    assert t_node in t_partition and b_node in b_partition
    # Couple node to node2. This means they get their own partition
    new_top_partitions = [top.copy() for top in top_partitions]
    new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
    new_t_groups = {t_node}, t_partition - {t_node}
    new_b_groups = {b_node}, b_partition - {b_node}
    # Replace the old partitions with the coupled ones
    del new_top_partitions[pair_idx]
    del new_bottom_partitions[pair_idx]
    new_top_partitions[pair_idx:pair_idx] = new_t_groups
    new_bottom_partitions[pair_idx:pair_idx] = new_b_groups

    # Refine both sides; only the bottom side may branch into multiple
    # orderings (branch=True). The top refinement is asserted below to be
    # unambiguous.
    new_top_partitions = self._refine_node_partitions(
        graph, new_top_partitions, edge_colors
    )
    new_bottom_partitions = self._refine_node_partitions(
        graph, new_bottom_partitions, edge_colors, branch=True
    )
    new_top_partitions = list(new_top_partitions)
    assert len(new_top_partitions) == 1
    new_top_partitions = new_top_partitions[0]
    for bot in new_bottom_partitions:
        # Fresh list per yield so callers can mutate independently.
        yield list(new_top_partitions), bot
1070
+
1071
def _process_ordered_pair_partitions(
    self,
    graph,
    top_partitions,
    bottom_partitions,
    edge_colors,
    orbits=None,
    cosets=None,
):
    """
    Processes ordered pair partitions as per the reference paper. Finds and
    returns all permutations and cosets that leave the graph unchanged.

    Parameters
    ----------
    graph
        The graph whose symmetry is being analysed.
    top_partitions, bottom_partitions : list of sets
        The ordered pair of node partitions under consideration.
    edge_colors : dict
        Maps edges of graph to a color.
    orbits : list of sets or None
        Known node orbits; deliberately shared (not copied) between
        recursive branches.
    cosets : dict or None
        Cosets found so far; copied per recursive branch.

    Returns
    -------
    tuple
        (permutations, cosets) found for this (sub)problem.
    """
    if orbits is None:
        orbits = [{node} for node in graph.nodes]
    else:
        # Note that we don't copy orbits when we are given one. This means
        # we leak information between the recursive branches. This is
        # intentional!
        orbits = orbits
    if cosets is None:
        cosets = {}
    else:
        cosets = cosets.copy()

    assert all(
        len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
    )

    # BASECASE
    if all(len(top) == 1 for top in top_partitions):
        # All nodes are mapped
        permutations = self._find_permutations(top_partitions, bottom_partitions)
        self._update_orbits(orbits, permutations)
        if permutations:
            return [permutations], cosets
        else:
            return [], cosets

    permutations = []
    unmapped_nodes = {
        (node, idx)
        for idx, t_partition in enumerate(top_partitions)
        for node in t_partition
        if len(t_partition) > 1
    }
    # Pick the smallest unmapped node (and its partition index) to couple.
    node, pair_idx = min(unmapped_nodes)
    b_partition = bottom_partitions[pair_idx]

    for node2 in sorted(b_partition):
        if len(b_partition) == 1:
            # Can never result in symmetry
            continue
        if node != node2 and any(
            node in orbit and node2 in orbit for orbit in orbits
        ):
            # Orbit prune branch
            continue
        # REDUCTION
        # Couple node to node2
        partitions = self._couple_nodes(
            top_partitions,
            bottom_partitions,
            pair_idx,
            node,
            node2,
            graph,
            edge_colors,
        )
        for opp in partitions:
            new_top_partitions, new_bottom_partitions = opp

            new_perms, new_cosets = self._process_ordered_pair_partitions(
                graph,
                new_top_partitions,
                new_bottom_partitions,
                edge_colors,
                orbits,
                cosets,
            )
            # COMBINATION
            permutations += new_perms
            cosets.update(new_cosets)

        # Nodes that are "settled": singleton cells identical top/bottom.
        mapped = {
            k
            for top, bottom in zip(top_partitions, bottom_partitions)
            for k in top
            if len(top) == 1 and top == bottom
        }
        ks = {k for k in graph.nodes if k < node}
        # Have all nodes with ID < node been mapped?
        find_coset = ks <= mapped and node not in cosets
        if find_coset:
            # Find the orbit that contains node
            for orbit in orbits:
                if node in orbit:
                    cosets[node] = orbit.copy()
    return permutations, cosets
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorph.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Graph isomorphism functions.
3
+ """
4
+ import networkx as nx
5
+ from networkx.exception import NetworkXError
6
+
7
+ __all__ = [
8
+ "could_be_isomorphic",
9
+ "fast_could_be_isomorphic",
10
+ "faster_could_be_isomorphic",
11
+ "is_isomorphic",
12
+ ]
13
+
14
+
15
@nx._dispatch(graphs={"G1": 0, "G2": 1})
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Compares sorted per-node invariant triples: degree, triangle count,
    and the number of maximal cliques each node participates in. Equal
    sequences are necessary but not sufficient for isomorphism.
    """
    # Node counts must agree before per-node invariants can match.
    if G1.order() != G2.order():
        return False

    def _invariants(G):
        # Per node: [degree, #triangles, #maximal cliques], sorted so the
        # resulting sequences are directly comparable across graphs.
        triangles = nx.triangles(G)
        cliques = list(nx.find_cliques(G))
        clique_counts = {n: sum(1 for c in cliques if n in c) for n in G}
        return sorted([deg, triangles[v], clique_counts[v]] for v, deg in G.degree())

    return _invariants(G1) == _invariants(G2)
58
+
59
+
60
# Legacy alias kept for backwards compatibility with the older public name.
graph_could_be_isomorphic = could_be_isomorphic
61
+
62
+
63
@nx._dispatch(graphs={"G1": 0, "G2": 1})
def fast_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Compares sorted per-node [degree, triangle-count] pairs; cheaper than
    :func:`could_be_isomorphic` because clique counts are skipped.
    """
    # Node counts must agree before per-node invariants can match.
    if G1.order() != G2.order():
        return False

    def _invariants(G):
        triangles = nx.triangles(G)
        return sorted([deg, triangles[v]] for v, deg in G.degree())

    return _invariants(G1) == _invariants(G2)
99
+
100
+
101
# Legacy alias kept for backwards compatibility with the older public name.
fast_graph_could_be_isomorphic = fast_could_be_isomorphic
102
+
103
+
104
@nx._dispatch(graphs={"G1": 0, "G2": 1})
def faster_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Only the degree sequences are compared.
    """
    # Node counts must agree before degree sequences can match.
    if G1.order() != G2.order():
        return False
    # Matching sorted degree sequences are necessary for isomorphism.
    return sorted(d for _, d in G1.degree()) == sorted(d for _, d in G2.degree())
132
+
133
+
134
# Legacy alias kept for backwards compatibility with the older public name.
faster_graph_could_be_isomorphic = faster_could_be_isomorphic
135
+
136
+
137
@nx._dispatch(
    graphs={"G1": 0, "G2": 1},
    preserve_edge_attrs="edge_match",
    preserve_node_attrs="node_match",
)
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2 should
        be considered equal during the isomorphism test.
        If node_match is not specified then node attributes are not considered.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute dictionaries
        for n1 and n2 as inputs.

    edge_match : callable
        A function that returns True if the edge attribute dictionary
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during the isomorphism test. If edge_match is
        not specified then edge attributes are not considered.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute dictionaries
        of the edges under consideration.

    Notes
    -----
    Uses the vf2 algorithm [1]_.

    Examples
    --------
    >>> import networkx.algorithms.isomorphism as iso

    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)

    >>> G1 = nx.DiGraph()
    >>> G2 = nx.DiGraph()
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
    >>> em = iso.numerical_edge_match("weight", 1)
    >>> nx.is_isomorphic(G1, G2)  # no weights considered
    True
    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
    False

    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')

    >>> G1 = nx.MultiDiGraph()
    >>> G2 = nx.MultiDiGraph()
    >>> G1.add_nodes_from([1, 2, 3], fill="red")
    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, node_match=nm)
    True

    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)

    >>> G1.add_edge(1, 2, weight=7)
    1
    >>> G2.add_edge(10, 20)
    1
    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True

    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
    with default values 7 and 2.5. Also using 'fill' node attribute with
    default value 'red'.

    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
    True

    See Also
    --------
    numerical_node_match, numerical_edge_match, numerical_multiedge_match
    categorical_node_match, categorical_edge_match, categorical_multiedge_match

    References
    ----------
    .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
       "An Improved Algorithm for Matching Large Graphs",
       3rd IAPR-TC15 Workshop on  Graph-based Representations in
       Pattern Recognition, Cuen, pp. 149-159, 2001.
       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
    """
    # Mixed directed/undirected input is not comparable.
    if G1.is_directed() != G2.is_directed():
        raise NetworkXError("Graphs G1 and G2 are not of the same type.")

    # Both graphs agree on directedness; pick the matching VF2 matcher.
    if G1.is_directed():
        matcher_cls = nx.algorithms.isomorphism.DiGraphMatcher
    else:
        matcher_cls = nx.algorithms.isomorphism.GraphMatcher

    matcher = matcher_cls(G1, G2, node_match=node_match, edge_match=edge_match)
    return matcher.is_isomorphic()
+ return gm.is_isomorphic()