# 最大流量和线性分配问题

## 准备工作

### 有向图（DiGraphs）

`DiGraph = namedtuple('DiGraph', ['setOfNodes','setOfArcs'])`

（注意：本文中的所有代码都是用Python 3.6编写的。）

### 节点（Nodes）

`x.uid != y.uid`
• `n.datum`：这表示任何数据对象。
`Node = namedtuple('Node', ['uid','datum'])`

### 弧（Arcs）

• `a.fromNode`：这是一个如上定义的节点
• `a.toNode`：这是一个如上定义的节点
• `a.datum`：这表示任何数据对象。
`Arc = namedtuple('Arc', ['fromNode','toNode','datum'])`

### DiGraphs

```def digraph_to_dict(G):
G_as_dict = dict([])
for a in G.setOfArcs:
if(a.fromNode not in G.setOfNodes):
err_msg = 'There is no Node {a.fromNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
pdg(G)
raise KeyError(err_msg)
if(a.toNode not in G.setOfNodes):
err_msg = 'There is no Node {a.toNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
pdg(G)
raise KeyError(err_msg)
G_as_dict[a.fromNode] = (G_as_dict[a.fromNode].union(frozenset([a]))) if (a.fromNode in G_as_dict) else frozenset([a])
for a in G.setOfArcs:
if(a.fromNode not in G.setOfNodes):
err_msg = 'There is no Node {a.fromNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
raise KeyError(err_msg)
if a.toNode not in G_as_dict:
G_as_dict[a.toNode] = frozenset([])
return G_as_dict```

```def digraph_to_double_dict(G):
G_as_dict = dict([])
for a in G.setOfArcs:
if(a.fromNode not in G.setOfNodes):
err_msg = 'There is no Node {a.fromNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
raise KeyError(err_msg)
if(a.toNode not in G.setOfNodes):
err_msg = 'There is no Node {a.toNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
raise KeyError(err_msg)
if(a.fromNode not in G_as_dict):
G_as_dict[a.fromNode] = dict({a.toNode : frozenset([a])})
else:
if(a.toNode not in G_as_dict[a.fromNode]):
G_as_dict[a.fromNode][a.toNode] = frozenset([a])
else:
G_as_dict[a.fromNode][a.toNode] = G_as_dict[a.fromNode][a.toNode].union(frozenset([a]))

for a in G.setOfArcs:
if(a.fromNode not in G.setOfNodes):
err_msg = 'There is no Node {a.fromNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
raise KeyError(err_msg)
if a.toNode not in G_as_dict:
G_as_dict[a.toNode] = dict({})
return G_as_dict```

```def find_node_by_uid(find_uid, G):
nodes = {n for n in G.setOfNodes if n.uid == find_uid}
if(len(nodes) != 1):
err_msg = 'Node with uid {find_uid!s} is not unique.'.format(**locals())
raise KeyError(err_msg)
return nodes.pop()```

Python中两种流行的graph表示形式分别是使用字典和使用对象。本文中的表示介于这两种常用的表示之间。

### Walks and Paths

`S_Arcs` 是一个在digraph G中的arcs的有限的序列(有序集合)，这样假设`a` 是`S_Arcs` 中的除了最后一个外的任意一个弧，`b` 是 `a` 在这序列之后的，则在`G.setOfNodes` 中必然有一个 `a.toNode = b.fromNode`这样的node `n = a.fromNode` 。

`S_Arcs`中的第一个arc 开始，直到`S_Arcs`中的最后一个，收集(按照顺序)在s圆弧中每两个连续的弧之间定义的所有节点n。标记在此操作期间的收集的节点的有序集合 `S_{Nodes}`

```def path_arcs_to_nodes(s_arcs):
s_nodes = list([])
arc_it = iter(s_arcs)
step_count = 0
last = None
try:
at_end = False
last = a1 = next(arc_it)
while (not at_end):
s_nodes += [a1.fromNode]
last = a2 = next(arc_it)
step_count += 1
if(a1.toNode != a2.fromNode):
err_msg = "Error at index {step_count!s} of Arc sequence.".format(**locals())
raise ValueError(err_msg)
a1 = a2
except StopIteration as e:
at_end = True

if(last is not None):
s_nodes += [last.toNode]

return s_nodes```
• 如果在序列 `S_Nodes`中任意一节点多次出现， `S_Arcs` 中一条在 digraph `G`Walk 。
• 反之，则称为 `S_Arcs` 中一条在digraph `G`的从`list(S_Nodes)[0]` 到 `list(S_Nodes)[-1]` 的 path 。

### 源点（Source Node）

```def source_nodes(G):
to_nodes = frozenset({a.toNode for a in G.setOfArcs})
sources = G.setOfNodes.difference(to_nodes)
return sources```

### 终端点(Terminal Node)

```def terminal_nodes(G):
from_nodes = frozenset({a.fromNode for a in G.setOfArcs})
terminals = G.setOfNodes.difference(from_nodes)
return terminals```

### Cuts and s-t Cuts

`Cut = namedtuple('Cut', ['setOfCutArcs'])`

```def cut_predecessors_of(n, cut, G):
allowed_arcs   = G.setOfArcs.difference(frozenset(cut.setOfCutArcs))
predecessors   = frozenset({})
previous_count = len(predecessors)
reach_fringe   = frozenset({n})
keep_going     = True
while( keep_going ):
reachable_from = frozenset({a.fromNode for a in allowed_arcs if (a.toNode in reach_fringe)})
reach_fringe   = reachable_from
predecessors   = predecessors.union(reach_fringe)
current_count  = len(predecessors)
keep_going     = current_count -  previous_count > 0
previous_count = current_count
return predecessors

def cut_successors_of(n, cut, G):
allowed_arcs   = G.setOfArcs.difference(frozenset(cut.setOfCutArcs))
successors     = frozenset({})
previous_count = len(successors)
reach_fringe   = frozenset({n})
keep_going     = True
while( keep_going ):
reachable_from = frozenset({a.toNode for a in allowed_arcs if (a.fromNode in reach_fringe)})
reach_fringe   = reachable_from
successors     = successors.union(reach_fringe)
current_count  = len(successors)
keep_going     = current_count -  previous_count > 0
previous_count = current_count
return successors```

```def get_first_part(cut, G):
firstPartFringe = frozenset({a.fromNode for a in cut.setOfCutArcs})
firstPart = firstPartFringe
for n in firstPart:
preds = cut_predecessors_of(n,cut,G)
firstPart = firstPart.union(preds)
return firstPart

def get_second_part(cut, G):
secondPartFringe = frozenset({a.toNode for a in cut.setOfCutArcs})
secondPart = secondPartFringe
for n in secondPart:
succs= cut_successors_of(n,cut,G)
secondPart = secondPart.union(succs)
return secondPart```

`cut` 是digraph `G` 的一个 cut 如果:

`(get_first_part(cut, G).union(get_second_part(cut, G)) == G.setOfNodes) and (len(get_first_part(cut, G).intersection(get_second_part(cut, G))) == 0)`

`cut` 是一个 x-y cut 如果：

` (x in get_first_part(cut, G)) and (y in get_second_part(cut, G) ) and (x != y)`

`STCut = namedtuple('STCut', ['s','t','cut'])`

### Flow Networks

`FlowNodeDatum = namedtuple('FlowNodeDatum', ['flowIn','flowOut'])`

`FlowArcDatum = namedtuple('FlowArcDatum', ['capacity','flow'])`

`flowNodeDatum.flowIn``flowNodeDatum.flowOut` 实数

`flowArcDatum.capacity` 和 `flowArcDatum.flow` 也是正实数。

```n.datum.flowIn = sum({a.datum.flow for a in G.Arcs if a.toNode == n})
n.datum.flowOut = sum({a.datum.flow for a in G.Arcs if a.fromNode == n})```

Digraph `G`现在代表一个flow network

`G`中的flow指在`G`的所有 `a``a.flow`

### Feasible Flows

• 1、 `G.setOfNodes` 中除了源点和终端点的所有node `n``n.datum.flowIn = n.datum.flowOut`
• 2、 `G.setOfArcs` 中的每一个 arc `a` 满足 `a.datum.flow <= a.datum.capacity`

## Cut Capacity

```def cut_capacity(stCut, G):
part_1 = get_first_part(stCut.cut,G)
part_2 = get_second_part(stCut.cut,G)
s_part = part_1 if stCut.s in part_1 else part_2
t_part = part_1 if stCut.t in part_1 else part_2
cut_capacity = sum({a.datum.capacity for a in stCut.cut.setOfCutArcs if ( (a.fromNode in s_part) and (a.toNode in t_part) )})
return cut_capacity```

## 最小容量切割（Minimum Capacity Cut）

`stCut = stCut(s,t,cut)` 是一个由 digraph `G` 表示的一个 flow network s-t cut

`    cut_capacity(stCut, G) <= cut_capacity(stCutCompetitor, G)`

## Stripping the Flows Away

```def strip_flows(G):
new_nodes= frozenset( (Node(n.uid, FlowNodeDatum(0.0,0.0)) for n in G.setOfNodes) )
new_arcs = frozenset([])
for a in G.setOfArcs:
new_fromNode = Node(a.fromNode.uid, FlowNodeDatum(0.0,0.0))
new_toNode     = Node(a.toNode.uid, FlowNodeDatum(0.0,0.0))
new_arc            = Arc(new_fromNode, new_toNode, FlowArcDatum(a.datum.capacity, 0.0))
new_arcs          = new_arcs.union(frozenset([new_arc]))
return DiGraph(new_nodes, new_arcs)```

## 最大流量问题

`(len(list(source_nodes(G))) == 1) and (next(iter(source_nodes(G))) == s) and (len(list(terminal_nodes(G))) == 1) and (next(iter(terminal_nodes(G))) == t)`

`MaxFlowProblemState = namedtuple('MaxFlowProblemState', ['G','sourceNodeUid','terminalNodeUid','maxFlowProblemStateUid'])`

`sourceNodeUid = s.uid``terminalNodeUid = t.uid`, 以及 `maxFlowProblemStateUid` 是问题实例的标识符。

## 最大流量解决方案

`maxFlowProblem`代表最大流问题`maxFlowProblem`的解决方案可以由表示为 flow network 的digraph `H`.来表示。

Digraph `H`是输入`python maxFlowProblem`最大流量问题可行解决方案，若：

• 1、`strip_flows(maxFlowProblem.G) == strip_flows(H)`
• 2、`H`是一个 flow network，具有feasible flows

`strip_flows(G) == strip_flows(K)` and `find_node_by_uid(t.uid,G).flowIn < find_node_by_uid(t.uid,K).flowIn`

1、有向图 `G`与相应的最大流问题是相同的，不同之处是，任意节点弧的`n.datum.flowIn``n.datum.flowOut``a.datum.flow`可以是不同的。

2、代表具有可行流量（feasible flows）流网络（flow network ）

1. 所述`flowIn`节点对应于所述终端节点中的最大流问题是尽可能的大（当条件1和2都还满意）。

`mfps=MaxFlowProblemState(H, maxFlowProblem.sourceNodeUid, maxFlowProblem.terminalNodeUid, maxFlowProblem.maxFlowProblemStateUid)`

```def get_mfps_flow(mfps):
flow_from_s = find_node_by_uid(mfps.sourceNodeUid,mfps.G).datum.flowOut
flow_to_t      = find_node_by_uid(mfps.terminalNodeUid,mfps.G).datum.flowIn

if( (flow_to_t - flow_from_s) > 0):
raise Exception('Infeasible s-t flow')
return flow_to_t```

## s-t Cut Flow

`mfps`代表`MaxFlowProblemState`，让`stCut`代表cut的 `mfps.G`。所述 cut flow`stCut`定义为：

```def get_stcut_flow(stCut,mfps):
s = find_node_by_uid(mfps.sourceNodeUid, mfps.G)
t = find_node_by_uid(mfps.terminalNodeUid, mfps.G)
part_1 = get_first_part(stCut.cut,mfps.G)
part_2 = get_second_part(stCut.cut,mfps.G)
s_part = part_1 if s in part_1 else part_2
t_part = part_1 if t in part_1 else part_2
s_t_sum = sum(a.datum.flow for a in mfps.G.setOfArcs if (a in stCut.cut.setOfCutArcs) and (a.fromNode in s_part) )
t_s_sum = sum(a.datum.flow for a in mfps.G.setOfArcs if (a in stCut.cut.setOfCutArcs) and (a.fromNode in t_part) )
cut_flow = s_t_sum - t_s_sum
return cut_flow```

st cut流是从包含源节点的分区到包含终端节点的分区的流的总和减去从包含终端节点的分区到包含源节点的分区的流的总和。

## 最大流量，最小切割

`maxFlowProblem` 问题代表一个最大的流问题，让 `maxFlowProblem`的解决方案用一个由表示为digraph  `H`flow network来表示。

`get_flow_value(H, maxFlowProblem) <= cut_capacity(minStCut, maxFlowProblem.G)`

## 最大流量，最小切数定理（The Max Flow, Min Cut Theorem）

For any network, the maximal flow value from `s` to `t` is equal to the minimum cut capacity of all cuts separating `s` and `t`. 对于任何网络，从s到t的最大流量的值等于所有的切割分离s和t中的最小割容量。

`get_flow_value(H, maxFlowProblem) == cut_capacity(minStCut, maxFlowProblem.G)`

## The Ford-Fulkerson Method and the Edmonds-Karp Algorithm

CLRS定义了Ford-Fulkerson方法（第26.2节）：

```
FORD-FULKERSON-METHOD(G, s, t):
    initialize flow f to 0
    while there exists an augmenting path p in the residual graph G_f:
        augment flow f along p
    return f
```

## 残差图（Residual Graph）

```ResidualDatum = namedtuple('ResidualDatum', ['residualCapacity','action'])

def agg_n_to_u_cap(n,u,G_as_dict):
arcs_out = G_as_dict[n]
return sum([a.datum.capacity for a in arcs_out if( (a.fromNode == n) and (a.toNode == u) ) ])

def agg_n_to_u_flow(n,u,G_as_dict):
arcs_out = G_as_dict[n]
return sum([a.datum.flow for a in arcs_out if( (a.fromNode == n) and (a.toNode == u) ) ])

def get_residual_graph_of(G):
G_as_dict = digraph_to_dict(G)
residual_nodes = G.setOfNodes
residual_arcs = frozenset([])

for n in G_as_dict:
arcs_from = G_as_dict[n]
nodes_to = frozenset([find_node_by_uid(a.toNode.uid,G) for a in arcs_from])

for u in nodes_to:
n_to_u_cap_sum  = agg_n_to_u_cap(n,u,G_as_dict)
n_to_u_flow_sum = agg_n_to_u_flow(n,u,G_as_dict)
if(n_to_u_cap_sum > n_to_u_flow_sum):
flow = round(n_to_u_cap_sum - n_to_u_flow_sum, TOL)
residual_arcs = residual_arcs.union( frozenset([Arc(n,u,datum=ResidualDatum(flow, 'push'))]) )
if(n_to_u_flow_sum > 0.0):
flow = round(n_to_u_flow_sum, TOL)
residual_arcs = residual_arcs.union( frozenset([Arc(u,n,datum=ResidualDatum(flow, 'pull'))]) )
return DiGraph(residual_nodes, residual_arcs)```

`agg_n_to_u_cap(n,u,G_as_dict)` 返回 `G.setOfArcs` 的一个所有弧 `a` 是当`a.fromNode = n``a.toNode = u` 时的子集时的 `a.datum.capacity`的和。

`agg_n_to_u_flow(n,u,G_as_dict)` 返回 `G.setOfArcs` 中所有满足 `a.fromNode = n` 且 `a.toNode = u` 的弧 `a` 的 `a.datum.flow` 的和。

1、这对`n,u``G_f`中不产生任何中如果没有 `a``G.setOfArcs``a.fromNode = n``a.toNode = u`

2、所述一对`n,u`产生`G_f.setOfArcs`中的 `a`，在其中`a`表示标记的一个 push flow弧`n``u` `a = Arc(n,u,datum=ResidualNode(n_to_u_cap_sum - n_to_u_flow_sum))`如果`n_to_u_cap_sum > n_to_u_flow_sum`

3、所述一对`n,u`产生`G_f.setOfArcs`中的 `a`，在其中`a`表示标记的一个 pull flow 弧`n``u` `a = Arc(n,u,datum=ResidualNode(n_to_u_cap_sum - n_to_u_flow_sum))`如果`n_to_u_flow_sum > 0.0`

## 增强路径

```def augment(augmentingPath, H):
augmentingPath = list(augmentingPath)
H_as_dict = digraph_to_dict(H)
new_nodes = frozenset({})
new_arcs = frozenset({})
visited_nodes  = frozenset({})
visited_arcs = frozenset({})

bottleneck_residualCapacity = min( augmentingPath, key=lambda a: a.datum.residualCapacity ).datum.residualCapacity
for x in augmentingPath:
from_node_uid = x.fromNode.uid if x.datum.action == 'push' else x.toNode.uid
to_node_uid = x.toNode.uid   if x.datum.action == 'push' else x.fromNode.uid
from_node = find_node_by_uid( from_node_uid, H )
to_node = find_node_by_uid( to_node_uid, H )
load = bottleneck_residualCapacity if x.datum.action == 'push' else -bottleneck_residualCapacity

for a in H_as_dict[from_node]:
if(a.toNode == to_node):
new_flow = a.datum.flow
new_from_node_flow_out = a.fromNode.datum.flowOut
new_to_node_flow_in = a.toNode.datum.flowIn

new_from_look = {n for n in new_nodes if (n.uid == a.fromNode.uid)}
new_to_look = {n for n in new_nodes if (n.uid == a.toNode.uid)  }
prev_from_node = new_from_look.pop() if (len(new_from_look)>0) else a.fromNode
prev_to_node = new_to_look.pop()   if (len(new_to_look)>0)   else a.toNode
new_nodes = new_nodes.difference(frozenset({prev_from_node}))
new_nodes = new_nodes.difference(frozenset({prev_to_node}))

if(test_sum > a.datum.capacity):
new_flow = a.datum.capacity
new_from_node_flow_out = prev_from_node.datum.flowOut - a.datum.flow + a.datum.capacity
new_to_node_flow_in    = prev_to_node.datum.flowIn - a.datum.flow + a.datum.capacity
elif(test_sum < 0.0):
new_flow = 0.0
new_from_node_flow_out = prev_from_node.datum.flowOut - a.datum.flow
new_to_node_flow_in    = prev_to_node.datum.flowIn - a.datum.flow
else:
new_flow = test_sum
new_from_node_flow_out = prev_from_node.datum.flowOut - a.datum.flow + new_flow
new_to_node_flow_in    = prev_to_node.datum.flowIn - a.datum.flow + new_flow

new_from_node_flow_out = round(new_from_node_flow_out, TOL)
new_to_node_flow_in    = round(new_to_node_flow_in, TOL)
new_flow               = round(new_flow, TOL)

new_from_node = Node(prev_from_node.uid, FlowNodeDatum(prev_from_node.datum.flowIn, new_from_node_flow_out))
new_to_node   = Node(prev_to_node.uid, FlowNodeDatum(new_to_node_flow_in, prev_to_node.datum.flowOut))
new_arc       = Arc(new_from_node, new_to_node, FlowArcDatum(a.datum.capacity, new_flow))

visited_nodes = visited_nodes.union(frozenset({a.fromNode,a.toNode}))
visited_arcs  = visited_arcs.union(frozenset({a}))
new_nodes     = new_nodes.union(frozenset({new_from_node, new_to_node}))
new_arcs      = new_arcs.union(frozenset({new_arc}))

not_visited_nodes = H.setOfNodes.difference(visited_nodes)
not_visited_arcs  = H.setOfArcs.difference(visited_arcs)

full_new_nodes = new_nodes.union(not_visited_nodes)
full_new_arcs  = new_arcs.union(not_visited_arcs)
G = DiGraph(full_new_nodes, full_new_arcs)

full_new_arcs_update = frozenset([])
for a in full_new_arcs:
new_from_node = a.fromNode
new_to_node   = a.toNode
new_from_node = find_node_by_uid( a.fromNode.uid, G )
new_to_node   = find_node_by_uid( a.toNode.uid, G )
full_new_arcs_update = full_new_arcs_update.union( {Arc(new_from_node, new_to_node, FlowArcDatum(a.datum.capacity, a.datum.flow))} )

G = DiGraph(full_new_nodes, full_new_arcs_update)

return G```

`K = augment(augmentingPath, H)`，那么`K`代表可行的最大流量的解决方案`maxFlowProblem`。为了使声明成真，所被`K`表示的流网络必须具有可行的流（不违反容量约束守恒约束）

## 一些来自Sedgewick和Wayne的证明

《 Algorithms, fourth edition by Robert Sedgewich and Kevin Wayne 》一书提供了一些精彩和短暂的证据（第892-894页）。我将在这里重新创建它们，尽管我将使用以前定义的语言。对这些证明的标签与Sedgewick书中的相同。

1. 存在容量等于流量`f`st切割
2. `f`最大流量
3. 没有增广路径相对`f`

## 从Ford-Fulkerson 到 Edmonds-Karp

1. 如何构建增强路径
2. 如果我们使用实数而不是整数，方法会终止吗？
3. 要终止（如果有）需要多长时间？

```def bfs(sourceNode, terminalNode, G_f):
G_f_as_dict = digraph_to_dict(G_f)
parent_arcs = dict([])
visited = frozenset([])
deq = deque([sourceNode])
while len(deq) > 0:
curr = deq.popleft()
if curr == terminalNode:
break
for a in G_f_as_dict.get(curr):
if (a.toNode not in visited):
visited = visited.union(frozenset([a.toNode]))
parent_arcs[a.toNode] = a
deq.extend([a.toNode])
path = list([])
curr = terminalNode
while(curr != sourceNode):
if (curr not in parent_arcs):
err_msg = 'No augmenting path from {} to {}.'.format(sourceNode.uid, terminalNode.uid)
raise StopIteration(err_msg)
path.extend([parent_arcs[curr]])
curr = parent_arcs[curr].fromNode
path.reverse()

test = deque([path[0].fromNode])
for a in path:
if(test[-1] != a.fromNode):
raise ValueError(err_msg)
test.extend([a.toNode])

return path```

Edmonds-Karp算法执行`O(NA^2)`。如果在大多数`NA/2` 的路径将在算法中进行探讨，探索每个路径BFS`N+A`那么产品的最显著项，因此渐近复杂性`O(NA^2)`

```def edmonds_karp(mfp):
sid, tid = mfp.sourceNodeUid, mfp.terminalNodeUid
no_more_paths = False
loop_count = 0
while(not no_more_paths):
residual_digraph = get_residual_graph_of(mfp.G)
try:
asource = find_node_by_uid(mfp.sourceNodeUid, residual_digraph)
aterm   = find_node_by_uid(mfp.terminalNodeUid, residual_digraph)
apath   = bfs(asource, aterm, residual_digraph)
paint_mfp_path(mfp, loop_count, apath)
G = augment(apath, mfp.G)
s = find_node_by_uid(sid, G)
t = find_node_by_uid(tid, G)
mfp = MaxFlowProblemState(G, s.uid, t.uid, mfp.maxFlowProblemStateUid)
loop_count += 1
except StopIteration as e:
no_more_paths = True
return mfp```

```import uuid

def fast_get_mfps_flow(mfps):
flow_from_s = {n for n in mfps.G.setOfNodes if n.uid == mfps.sourceNodeUid}.pop().datum.flowOut
flow_to_t   = {n for n in mfps.G.setOfNodes if n.uid == mfps.terminalNodeUid}.pop().datum.flowIn

if( (flow_to_t - flow_from_s) > 0.00):
raise Exception('Infeasible s-t flow')
return flow_to_t

def fast_e_k_preprocess(G):
G = strip_flows(G)
get = dict({})
get['nodes'] = dict({})
get['node_to_node_capacity'] = dict({})
get['node_to_node_flow'] = dict({})
get['arcs'] = dict({})
get['residual_arcs'] = dict({})

for a in G.setOfArcs:
if(a.fromNode not in G.setOfNodes):
err_msg = 'There is no Node {a.fromNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
raise KeyError(err_msg)
if(a.toNode not in G.setOfNodes):
err_msg = 'There is no Node {a.toNode.uid!s} to match the Arc from {a.fromNode.uid!s} to {a.toNode.uid!s}'.format(**locals())
raise KeyError(err_msg)
get['nodes'][a.fromNode.uid] = a.fromNode
get['nodes'][a.toNode.uid]   = a.toNode
lark = Arc(a.fromNode.uid, a.toNode.uid, FlowArcDatumWithUid(a.datum.capacity, a.datum.flow, uuid.uuid4()))
if(a.fromNode.uid not in get['arcs']):
get['arcs'][a.fromNode.uid] = dict({a.toNode.uid : dict({lark.datum.uid : lark})})
else:
if(a.toNode.uid not in get['arcs'][a.fromNode.uid]):
get['arcs'][a.fromNode.uid][a.toNode.uid] = dict({lark.datum.uid : lark})
else:
get['arcs'][a.fromNode.uid][a.toNode.uid][lark.datum.uid] = lark

for a in G.setOfArcs:
if a.toNode.uid not in get['arcs']:
get['arcs'][a.toNode.uid] = dict({})

for n in get['nodes']:
get['residual_arcs'][n] = dict()
get['node_to_node_capacity'][n] = dict()
get['node_to_node_flow'][n] = dict()
for u in get['nodes']:
n_to_u_cap_sum  = sum(a.datum.capacity for a in G.setOfArcs if (a.fromNode.uid == n) and (a.toNode.uid == u) )
n_to_u_flow_sum = sum(a.datum.flow     for a in G.setOfArcs if (a.fromNode.uid == n) and (a.toNode.uid == u) )

if(n_to_u_cap_sum > n_to_u_flow_sum):
flow = round(n_to_u_cap_sum - n_to_u_flow_sum, TOL)
get['residual_arcs'][n][u] = Arc(n,u,ResidualDatum(flow, 'push'))
if(n_to_u_flow_sum > 0.0):
flow = round(n_to_u_flow_sum, TOL)
get['residual_arcs'][u][n] = Arc(u,n,ResidualDatum(flow, 'pull'))

get['node_to_node_capacity'][n][u] = n_to_u_cap_sum
get['node_to_node_flow'][n][u]     = n_to_u_flow_sum

return get

def fast_bfs(sid, tid, get):
parent_of   = dict([])
visited     = frozenset([])
deq         = coll.deque([sid])
while len(deq) > 0:
n = deq.popleft()
if n == tid:
break
for u in get['residual_arcs'][n]:
if (u not in visited):
visited = visited.union(frozenset({u}))
parent_of[u] = get['residual_arcs'][n][u]
deq.extend([u])
path = list([])
curr = tid
while(curr != sid):
if (curr not in parent_of):
err_msg = 'No augmenting path from {} to {}.'.format(sid, curr)
raise StopIteration(err_msg)
path.extend([parent_of[curr]])
curr = parent_of[curr].fromNode
path.reverse()
return path

def fast_edmonds_karp(mfps):
sid, tid = mfps.sourceNodeUid, mfps.terminalNodeUid
get = fast_e_k_preprocess(mfps.G)
no_more_paths, loop_count = False, 0
while(not no_more_paths):
try:
apath   = fast_bfs(sid, tid, get)
get = fast_augment(apath, get)
loop_count += 1
except StopIteration as e:
no_more_paths = True
nodes = frozenset(get['nodes'].values())
arcs = frozenset({})
for from_node in get['arcs']:
for to_node in get['arcs'][from_node]:
for arc in get['arcs'][from_node][to_node]:
arcs |= frozenset({get['arcs'][from_node][to_node][arc]})

G = DiGraph(nodes, arcs)
mfps = MaxFlowProblemState(G, sourceNodeUid=sid, terminalNodeUid=tid, maxFlowProblemStateUid=mfps.maxFlowProblemStateUid)
return mfps```

**还要注意，由于具有“拉”的残差可能是扩展路径的一部分，所以在流网络的DiGraph中受影响的节点可能不在路径中`G!`

## 测试双边

```Bipartition = coll.namedtuple('Bipartition',['firstPart', 'secondPart', 'G'])
def bipartition(G):
nodes = frozenset(G.setOfNodes
arcs  = frozenset(G.setOfArcs)
arcs = arcs.union( frozenset( {Arc(a.toNode,a.fromNode,a.datum) for a in G.setOfArcs} ) )
H = DiGraph(nodes, arcs)
H_as_dict = digraph_to_dict(H)
color = dict([])
some_node = next(iter(H.setOfNodes))
deq = coll.deque([some_node])
color[some_node] = -1
while len(deq) > 0:
curr = deq.popleft()
for a in H_as_dict.get(curr):
if (a.toNode not in color):
color[a.toNode] = -1*color[curr]
deq.extend([a.toNode])
elif(color[curr] == color[a.toNode]):
print(curr,a.toNode)
raise Exception('Not Bipartite.')

firstPart  = frozenset( {n for n in color if color[n] == -1 } )
secondPart = frozenset( {n for n in color if color[n] == 1 } )

if( firstPart.union(secondPart) != G.setOfNodes ):
raise Exception('Not Bipartite.')

return Bipartition(firstPart, secondPart, G)```

## 最大双方匹配

`H.setOfArcs`将为每个包含一个`G.setOfArcs`。如果一个弧线 `a`处于`G.setOfArcs`并且`a.fromNode`处于`bipartition.firstPart`并且`a.toNode`在其中`bipartition.secondPart`并且包括`a``H`（添加`FlowArcDatum(1,0)`）中）。

```def solve_mbm( bipartition ):
s = Node(uuid.uuid4(), FlowNodeDatum(0,0))
t = Node(uuid.uuid4(), FlowNodeDatum(0,0))

translate = {}
arcs = frozenset([])
for a in bipartition.G.setOfArcs:
if ( (a.fromNode in bipartition.firstPart) and (a.toNode in bipartition.secondPart) ):
fark = Arc(a.fromNode,a.toNode,FlowArcDatum(1,0))
arcs = arcs.union({fark})
translate[frozenset({a.fromNode.uid,a.toNode.uid})] = a
elif ( (a.toNode in bipartition.firstPart) and (a.fromNode in bipartition.secondPart) ):
bark = Arc(a.toNode,a.fromNode,FlowArcDatum(1,0))
arcs = arcs.union({bark})
translate[frozenset({a.fromNode.uid,a.toNode.uid})] = a
arcs1 = frozenset( {Arc(s,n,FlowArcDatum(1,0)) for n in bipartition.firstPart } )
arcs2 = frozenset( {Arc(n,t,FlowArcDatum(1,0)) for n in bipartition.secondPart } )
arcs = arcs.union(arcs1.union(arcs2))

nodes = frozenset( {Node(n.uid,FlowNodeDatum(0,0)) for n in bipartition.G.setOfNodes} ).union({s}).union({t})
G = DiGraph(nodes, arcs)
mfp = MaxFlowProblemState(G, s.uid, t.uid, 'bipartite')
result = edmonds_karp(mfp)

lookup_set = {a for a in result.G.setOfArcs if (a.datum.flow > 0) and (a.fromNode.uid != s.uid) and (a.toNode.uid != t.uid)}
matching = {translate[frozenset({a.fromNode.uid,a.toNode.uid})] for a in lookup_set}

return matching```

## 最小节点覆盖

```ArcMatchingDatum = coll.namedtuple('ArcMatchingDatum', ['inMatching' ])
NodeMatchingDatum = coll.namedtuple('NodeMatchingDatum', ['visited'])

def dfs_helper(snodes, G):
sniter, do_stop = iter(snodes), False
visited, visited_uids = set(), set()
while(not do_stop):
try:
stack = [ next(sniter) ]
while len(stack) > 0:
curr = stack.pop()
if curr.uid not in visited_uids:
visited = visited.union( frozenset( {Node(curr.uid, NodeMatchingDatum(False))} ) )
visited_uids = visited.union(frozenset({curr.uid}))
succ = frozenset({a.toNode for a in G.setOfArcs if a.fromNode == curr})
stack.extend( succ.difference( frozenset(visited) ) )
except StopIteration as e:
stack, do_stop = [], True
return visited

def get_min_node_cover(matching, bipartition):
nodes = frozenset( { Node(n.uid, NodeMatchingDatum(False)) for n in bipartition.G.setOfNodes} )
G = DiGraph(nodes, None)
charcs = frozenset( {a for a in matching if ( (a.fromNode in bipartition.firstPart) and (a.toNode in bipartition.secondPart) )} )
arcs0 = frozenset( { Arc(find_node_by_uid(a.toNode.uid,G), find_node_by_uid(a.fromNode.uid,G), ArcMatchingDatum(True) ) for a in charcs } )
arcs1 = frozenset( { Arc(find_node_by_uid(a.fromNode.uid,G), find_node_by_uid(a.toNode.uid,G), ArcMatchingDatum(True) ) for a in matching.difference(charcs) } )
not_matching = bipartition.G.setOfArcs.difference( matching )
charcs = frozenset( {a for a in not_matching if ( (a.fromNode in bipartition.secondPart) and (a.toNode in bipartition.firstPart) )} )
arcs2 = frozenset( { Arc(find_node_by_uid(a.toNode.uid,G), find_node_by_uid(a.fromNode.uid,G), ArcMatchingDatum(False) ) for a in charcs } )
arcs3 = frozenset( { Arc(find_node_by_uid(a.fromNode.uid,G), find_node_by_uid(a.toNode.uid,G), ArcMatchingDatum(False) ) for a in not_matching.difference(charcs) } )
arcs = arcs0.union(arcs1.union(arcs2.union(arcs3)))

G = DiGraph(nodes, arcs)
bip = Bipartition({find_node_by_uid(n.uid,G) for n in bipartition.firstPart},{find_node_by_uid(n.uid,G) for n in bipartition.secondPart},G)
match_from_nodes = frozenset({find_node_by_uid(a.fromNode.uid,G) for a in matching})
match_to_nodes = frozenset({find_node_by_uid(a.toNode.uid,G) for a in matching})
snodes = bip.firstPart.difference(match_from_nodes).difference(match_to_nodes)
visited_nodes = dfs_helper(snodes, bip.G)
not_visited_nodes = bip.G.setOfNodes.difference(visited_nodes)

H = DiGraph(visited_nodes.union(not_visited_nodes), arcs)
cover1 = frozenset( {a.fromNode for a in H.setOfArcs if ( (a.datum.inMatching) and (a.fromNode.datum.visited) ) } )
cover2 = frozenset( {a.fromNode for a in H.setOfArcs if ( (a.datum.inMatching) and (not a.toNode.datum.visited) ) } )
min_cover_nodes = cover1.union(cover2)
true_min_cover_nodes = frozenset({find_node_by_uid(n.uid, bipartition.G) for n in min_cover_nodes})

return min_cover_nodes```

## 线性分配问题

`WeightArcDatum = namedtuple('WeightArcDatum', ['weight'])`

## Kuhn-Munkres算法

Kuhn-Munkres算法解决了线性分配问题。一个很好的实现可能需要`O(N^{4})`时间（代表问题的图中`N`节点数量）。更容易解释的实现（对于重新生成DiGraph的版本）和（用于维护DiGraph的版本）。这与Edmonds-Karp算法的两种不同实现类似。`O(N^{5})O(N^{4})`

KMA： 接着，形成一个新的有向图 `K`仅构成零个重量弧`H`节点 入射在这些。形成`bipartition`了对节点`K`，然后用`solve_mbm( bipartition )`获得的最大匹配`matching`上）`K`。如果`matching`是一个完美匹配`H`（该圆弧`matching`入射所有上的节点`H.setOfNodes`），则`matching`是到的最优解的线性分配问题

```def kuhn_munkres( bipartition ):
nodes = bipartition.G.setOfNodes
arcs = frozenset({})

for n in bipartition.firstPart:
z = min( {x.datum.weight for x in bipartition.G.setOfArcs if ( (x.fromNode == n) or (x.toNode == n) )} )
arcs = arcs.union( frozenset({Arc(a.fromNode, a.toNode, ArcWeightDatum(a.datum.weight - z)) }) )

for n in bipartition.secondPart:
z = min( {x.datum.weight for x in bipartition.G.setOfArcs if ( (x.fromNode == n) or (x.toNode == n) )} )
arcs = arcs.union( frozenset({Arc(a.fromNode, a.toNode, ArcWeightDatum(a.datum.weight - z)) }) )

H = DiGraph(nodes, arcs)

assignment, value = dict({}), 0
not_done = True

while( not_done ):
zwarcs = frozenset( {a for a in H.setOfArcs if a.datum.weight == 0} )
znodes = frozenset( {n.fromNode for n in zwarcs} ).union( frozenset( {n.toNode for n in zwarcs} ) )
K = DiGraph(znodes, zwarcs)
k_bipartition = bipartition(K)
matching = solve_mbm( k_bipartition )
mnodes = frozenset({a.fromNode for a in matching}).union(frozenset({a.toNode for a in matching}))
if( len(mnodes) == len(H.setOfNodes) ):
for a in matching:
assignment[ a.fromNode.uid ] = a.toNode.uid
value = sum({a.datum.weight for a in matching})
not_done = False
else:
node_cover = get_min_node_cover(matching, bipartition(K))
z = min( frozenset( {a.datum.weight for a in H.setOfArcs if a not in node_cover} ) )
nodes = H.setOfNodes
arcs1 = frozenset( {Arc(a.fromNode,a.toNode,ArcWeightDatum(a.datum.weigh-z)) for a in H.setOfArcs if ( (a.fromNode not in node_cover) and (a.toNode not in node_cover)} )
arcs2 = frozenset( {Arc(a.fromNode,a.toNode,ArcWeightDatum(a.datum.weigh)) for a in H.setOfArcs if ( (a.fromNode not in node_cover) != (a.toNode not in node_cover)} )
arcs3 = frozenset( {Arc(a.fromNode,a.toNode,ArcWeightDatum(a.datum.weigh+z)) for a in H.setOfArcs if ( (a.fromNode in node_cover) and (a.toNode in node_cover)} )
arcs = arcs1.union(arcs2.union(arcs3))
H = DiGraph(nodes,arcs)

return value, assignment```

\$ 2

\$ 3

\$ 3

\$ 3

\$ 2

\$ 3

\$ 3

\$ 3

\$ 2

\$ 9

\$ 9

\$ 1

\$ 2

\$ 3

\$ 3

\$ 0

\$ 3

\$ 2

\$ 3

\$ 0

\$ 3

\$ 3

\$ 2

\$ 0

\$ 9

\$ 9

\$ 1

\$ 0

Graph Cuts

st Flow

328 篇文章32 人订阅

0 条评论

## 相关文章

33250

9540

45950

### BZOJ4484: [Jsoi2015]最小表示(拓扑排序乱搞+bitset)

Description 【故事背景】 还记得去年JYY所研究的强连通分量的问题吗？去年的题目里，JYY研究了对于有向图的“加边”问题。对于图论有着强烈兴趣的J...

373100

1K10

17630

219100

38930

1K100

### 2014美团网笔试题目（总结）

http://blog.csdn.net/wzy_1988/article/details/12438143

20210