I am trying to implement the depth-first search (DFS) algorithm for a directed graph, as described in Cormen et al., Introduction to Algorithms (3rd ed.). Here is my implementation so far:
import pytest
from collections import OrderedDict
import copy


class Node(object):
    def __init__(self, color='white', parent=None, d=None, f=None):
        self.color = color
        self.parent = parent
        self.d = d  # Discovery time
        self.f = f  # Finishing time


class Graph(object):
    def __init__(self, edges, node_indices=None):
        self.edges = edges
        self.nodes = self.initialize_nodes(node_indices)
        self.adj = self.initialize_adjacency_list()

    def initialize_nodes(self, node_indices=None):
        if node_indices is None:
            node_indices = sorted(list(set(node for edge in self.edges for node in edge)))
        return OrderedDict([(node_index, Node()) for node_index in node_indices])

    def initialize_adjacency_list(self):
        A = {node: [] for node in self.nodes}
        for edge in self.edges:
            u, v = edge
            A[u].append(v)
        return A

    def dfs(self):
        self.time = 0
        for u, node in self.nodes.items():
            if node.color == 'white':
                self.dfs_visit(u)

    def dfs_visit(self, u):
        self.time += 1
        self.nodes[u].d = self.time
        self.nodes[u].color = 'gray'
        for v in self.adj[u]:
            if self.nodes[v].color == 'white':
                self.nodes[v].parent = u
                self.dfs_visit(v)
        self.nodes[u].color = 'black'
        self.time += 1
        self.nodes[u].f = self.time

    @staticmethod
    def transpose(edges):
        return [(v, u) for (u, v) in edges]

    def strongly_connected_components(self):
        self.dfs()
        finishing_times = {u: node.f for u, node in self.nodes.items()}
        self.__init__(self.transpose(self.edges))
        node_indices = sorted(finishing_times, key=finishing_times.get, reverse=True)
        self.nodes = self.initialize_nodes(node_indices)
        self.dfs()
        return self.trees()

    def trees(self):
        _trees = []
        nodes = copy.deepcopy(self.nodes)
        while nodes:
            for u, node in nodes.items():
                if node.parent is None:
                    _trees.append([u])
                    nodes.pop(u)
                else:
                    for tree in _trees:
                        if node.parent in tree:
                            tree.append(u)
                            nodes.pop(u)
        return _trees
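As a quick sanity check of the DFS bookkeeping, running it on a three-node chain produces the expected discovery and finishing times:

g = Graph([(1, 2), (2, 3)])
g.dfs()
print([(u, node.d, node.f) for u, node in g.nodes.items()])
# [(1, 1, 6), (2, 2, 5), (3, 3, 4)]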
To test that it works, I took the example from Figure 22.9, renaming the nodes a through h as 1 through 8, respectively. I then ran the following test:
def test_strongly_connected_components():
    edges = [(1,2), (5,1), (2,5), (5,6), (2,6), (6,7), (7,6), (2,3), (3,7), (3,4), (4,3), (4,8), (7,8), (8,8)]
    graph = Graph(edges)
    assert graph.strongly_connected_components() == [[1, 5, 2], [3, 4], [6, 7], [8]]


if __name__ == "__main__":
    pytest.main([__file__+"::test_strongly_connected_components", "-s"])
The test passes, confirming the SCCs shaded gray in the figure.
For the real thing, however, I need to use an input file SCC.txt, which contains 875,714 lines representing edges (as head/tail pairs of integers), and output the sizes of the five largest SCCs. For this I tried the following test:
@pytest.fixture
def edges():
    with open('SCC.txt') as f:
        return [tuple(map(int, line.split())) for line in f.read().splitlines()]


def test_SCC_on_full_graph(edges):
    graph = Graph(edges)
    SCCs = graph.strongly_connected_components()
    print(sorted(map(len, SCCs), reverse=True)[:5])  # Read off the sizes of the 5 largest SCCs


if __name__ == "__main__":
    pytest.main([__file__+"::test_SCC_on_full_graph", "-s"])
However, I ran into a RuntimeError: maximum recursion depth exceeded in cmp:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <scc.Graph object at 0x103253690>, u = 209099

    def dfs_visit(self, u):
        self.time += 1
        self.nodes[u].d = self.time
        self.nodes[u].color = 'gray'
        for v in self.adj[u]:
>           if self.nodes[v].color == 'white':
E           RuntimeError: maximum recursion depth exceeded in cmp

scc.py:53: RuntimeError
========================== 1 failed in 21.79 seconds ===========================
I have read about increasing sys.setrecursionlimit, but that does not seem to be recommended practice. Besides that, I am not sure how to improve the code, since it essentially implements the pseudocode given in the book verbatim. Any ideas on how to overcome this error?
Answer 0 (score: 1)
The DFS has to remain a DFS logically, but programmatically you can try a few workarounds:
Write the DFS in such a way that you can retry it from one of the upper-level vertices if it gets near the recursion limit.
Try using multiprocessing.
PS: Is it possible that infinite recursion occurs for the larger dataset, i.e. a logic error that only shows up with larger inputs? If you have datasets of incrementally increasing size, you can also determine the practical limit of this implementation in Python.
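A minimal sketch of that last idea, assuming the Graph class from the question and a hypothetical helper find_recursion_ceiling: feed the recursive DFS chain graphs of growing length and note where it first fails.

def find_recursion_ceiling(graph_cls, start=500, step=500, max_n=20000):
    # Chain graphs 0 -> 1 -> ... -> n force a recursion depth of about n,
    # so the first failure marks the practical limit of the recursive DFS.
    n = start
    while n <= max_n:
        try:
            graph_cls([(i, i + 1) for i in range(n)]).dfs()
        except RuntimeError:  # RecursionError in Python 3 is a subclass
            return n
        n += step
    return None

With CPython's default limit of 1000 frames this reports a ceiling of roughly a thousand vertices per DFS tree, far below what an 875,714-edge graph can demand.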
Answer 1 (score: 1)
I managed to solve the problem by increasing the stack_size and the recursion limit using the threading library. Here is the code for the solution:
import sys
import pytest
from collections import OrderedDict
import copy
import threading


class Node(object):
    def __init__(self, color='white', parent=None, d=None, f=None):
        self.color = color
        self.parent = parent
        self.d = d  # Discovery time
        self.f = f  # Finishing time


class Graph(object):
    def __init__(self, edges, node_indices=None):
        self.edges = edges
        self.nodes = self.initialize_nodes(node_indices)
        self.adj = self.initialize_adjacency_list()
        self.trees = dict()

    def initialize_nodes(self, node_indices=None):
        if node_indices is None:
            node_indices = sorted(list(set(node for edge in self.edges for node in edge)))
        return OrderedDict([(node_index, Node()) for node_index in node_indices])

    def initialize_adjacency_list(self):
        A = {node: [] for node in self.nodes}
        for edge in self.edges:
            u, v = edge
            A[u].append(v)
        return A

    def dfs(self):
        self.time = 0
        for u, node in self.nodes.items():
            if node.color == 'white':
                self.dfs_visit(u, root=u)

    def dfs_visit(self, u, root=None):
        if u == root:
            self.trees[root] = set()
        self.time += 1
        self.nodes[u].d = self.time
        self.nodes[u].color = 'gray'
        for v in self.adj[u]:
            if self.nodes[v].color == 'white':
                self.nodes[v].parent = u
                self.trees[root].add(v)
                self.dfs_visit(v, root=root)
        self.nodes[u].color = 'black'
        self.time += 1
        self.nodes[u].f = self.time

    @staticmethod
    def transpose(edges):
        return [(v, u) for (u, v) in edges]

    def strongly_connected_components(self):
        self.dfs()
        finishing_times = {u: node.f for u, node in self.nodes.items()}
        self.__init__(self.transpose(self.edges))
        node_indices = sorted(finishing_times, key=finishing_times.get, reverse=True)
        self.nodes = self.initialize_nodes(node_indices)
        self.dfs()
        trees = copy.deepcopy(self.trees)
        for k, v in trees.items():
            v.add(k)
        return trees.values()


@pytest.fixture
def edges():
    with open('SCC.txt') as f:
        return [tuple(map(int, line.split())) for line in f.read().splitlines()]


def SCC_on_full_graph():
    E = edges()
    graph = Graph(E)
    SCCs = graph.strongly_connected_components()
    SCC_sizes = sorted(list(map(len, SCCs)), reverse=True)
    print(SCC_sizes[:5])  # Read off the size of the 5 largest SCCs


if __name__ == "__main__":
    threading.stack_size(67108864)
    sys.setrecursionlimit(2**20)
    thread = threading.Thread(target=SCC_on_full_graph)
    thread.start()
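A note on why this works: threading.stack_size() only applies to threads created after the call, so the deep recursion has to run in a new Thread rather than in the main thread, whose stack size is fixed by the operating system. The 64 MiB stack together with the raised recursion limit gives dfs_visit enough headroom for the full SCC.txt graph; adding a thread.join() at the end would additionally make the main thread wait for the printed result.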