Description
Given a connected undirected graph \(G = (V, E)\) with positive edge weights, where \(n = |V|\) and \(m = |E|\), and the \(n\) vertices are numbered from \(1\) to \(n\). Given three positive integers \(u, v, L\) (\(u \neq v\)), suppose an edge \((u, v)\) with weight \(L\) is added to the graph. What is the minimum number of edges that must be deleted so that this new edge can appear both in some minimum spanning tree and in some maximum spanning tree?
Input
The first line contains two space-separated integers \(n\) and \(m\).
Each of the next \(m\) lines contains three positive integers \(u, v, w\), meaning that \(G\) has an edge \((u, v)\) with weight \(w\).
The last line contains three space-separated integers \(u, v, L\).
It is guaranteed that the graph contains no self-loops.
Output
Output a single line with one integer: the minimum number of edges that must be deleted.
Sample Input
3 2
3 2 1
1 2 3
1 2 2
Sample Output
1
Data Range
For 20% of the data, \(n \leq 10\), \(m \leq 20\), \(L \leq 20\);
For 50% of the data, \(n \leq 300\), \(m \leq 3000\), \(L \leq 200\);
For 100% of the data, \(n \leq 20000\), \(m \leq 200000\), \(L \leq 20000\).
Explanation
The new edge \((u, v)\) with weight \(L\) can appear in some minimum spanning tree if and only if \(u\) and \(v\) are not connected using only edges of weight strictly less than \(L\): otherwise, by the cycle rule, the new edge would be the unique heaviest edge on some cycle and could never be chosen. Symmetrically, it can appear in some maximum spanning tree if and only if \(u\) and \(v\) are not connected using only edges of weight strictly greater than \(L\). Edges of weight exactly \(L\) are harmless, since ties can always be broken in the new edge's favor. The two conditions involve disjoint edge sets, so the answer is the minimum number of lighter (\(\lt L\)) edges whose deletion disconnects \(u\) from \(v\) in the light subgraph, plus the minimum number of heavier (\(\gt L\)) edges whose deletion disconnects them in the heavy subgraph. By max-flow/min-cut duality, each count is a maximum flow from \(u\) to \(v\) with every edge given capacity \(1\).
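
In other words, with every edge given capacity \(1\),

\[ \mathrm{ans} = \operatorname{mincut}_{\lt L}(u, v) + \operatorname{mincut}_{\gt L}(u, v). \]

Checking this against the sample (\(L = 2\)): the only lighter edge is \((3, 2)\), under which \(1\) and \(2\) are already disconnected, so the first cut is \(0\); the only heavier edge is \((1, 2)\) itself, so the second cut is \(1\), and the answer is \(0 + 1 = 1\).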
Source Code
#include <iostream>
#include <cstdlib>
#include <cstdio>
#include <queue>
#include <cstring>
#include <algorithm>
using namespace std;
typedef long long lli;
const int maxn = 20010, maxm = 1000100;  // epool: two entries per undirected edge
const int infinit = 0x3fffffff;
class Dinic
{
public:
    struct edge
    {
        int u, v, flow;
        edge *next, *rev;
    };
    int n, s, t, ecnt;
    edge *edges[maxn], epool[maxm];
    int level[maxn];
    // Adds an undirected unit-capacity edge: both directions carry the
    // full capacity, and each direction acts as the other's residual.
    void add_edge(int u, int v, int flow)
    {
        edge *p = &epool[++ecnt],
             *q = &epool[++ecnt];
        p->u = u; p->v = v; p->flow = flow;
        p->next = edges[u]; edges[u] = p;
        q->u = v; q->v = u; q->flow = flow;
        q->next = edges[v]; edges[v] = q;
        p->rev = q; q->rev = p;
    }
    // BFS from the source builds the level graph; returns false once
    // the sink is unreachable, i.e. no augmenting path is left.
    bool make_level(void)
    {
        memset(level, 0, sizeof(level));
        queue<int> que;
        que.push(s);
        level[s] = 1;
        while (!que.empty()) {
            int p = que.front();
            que.pop();
            for (edge *ep = edges[p]; ep; ep = ep->next)
                if (ep->flow && !level[ep->v]) {
                    level[ep->v] = level[p] + 1;
                    que.push(ep->v);
                }
        }
        return level[t] > 0;
    }
    // DFS pushes a blocking flow along level-increasing edges that
    // still have remaining capacity.
    int dfs(int p, int mn)
    {
        if (p == t)
            return mn;
        int sum = 0, tmp;
        for (edge *ep = edges[p]; ep && sum < mn; ep = ep->next)
            if (ep->flow && level[ep->v] == level[p] + 1) {
                tmp = dfs(ep->v, min(ep->flow, mn - sum));
                if (tmp > 0) {
                    sum += tmp;
                    ep->flow -= tmp;
                    ep->rev->flow += tmp;
                }
            }
        if (sum <= 0)
            level[p] = 0;  // dead end: prune this vertex for the phase
        return sum;
    }
    // Alternates BFS phases and blocking flows; the total flow equals
    // the minimum s-t cut by max-flow/min-cut duality.
    int dinic(void)
    {
        int sum = 0, tmp;
        while (make_level()) {
            tmp = dfs(s, infinit);
            if (tmp <= 0)
                break;
            sum += tmp;
        }
        return sum;
    }
} graph_1, graph_2;
struct query { int u, v, len; };
bool operator < (query a, query b) { return a.len < b.len; }
int n, m, U, V, L;
query q[maxm];
int main(int argc, char** argv)
{
    scanf("%d%d", &n, &m);
    for (int i = 1; i <= m; i++)
        scanf("%d%d%d", &q[i].u, &q[i].v, &q[i].len);
    sort(q + 1, q + m + 1);
    scanf("%d%d%d", &U, &V, &L);
    // Build the two flow networks: graph_1 holds the edges lighter
    // than L, graph_2 the edges heavier than L. Edges of weight
    // exactly L never block the new edge, so they are discarded.
    graph_1.n = graph_2.n = n;
    graph_1.s = graph_2.s = U;
    graph_1.t = graph_2.t = V;
    for (int i = 1; i <= m; i++) {
        if (q[i].len < L) {
            graph_1.add_edge(q[i].u, q[i].v, 1);
        } else if (q[i].len > L) {
            graph_2.add_edge(q[i].u, q[i].v, 1);
        }
    }
    // The answer is the sum of the two minimum cuts.
    int res = graph_1.dinic() + graph_2.dinic();
    printf("%d\n", res);
    return 0;
}
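Verification
For the 20% subtask (\(m \leq 20\)) the min-cut reduction can be cross-checked by brute force. The sketch below is not part of the original solution: it enumerates every subset of deleted edges and applies the cycle-rule criterion directly, i.e. the new edge fits some minimum spanning tree iff \(u\) and \(v\) are disconnected over the surviving edges of weight \(\lt L\), and some maximum spanning tree iff they are disconnected over the surviving edges of weight \(\gt L\). The names reachable and brute_force are illustrative, not from the original code.
#include <vector>
#include <algorithm>
using namespace std;
struct E { int u, v, w; };
// Depth-first reachability from s to t over the surviving edges
// (bit set in `mask`) whose weight satisfies pred(w, L).
static bool reachable(const vector<E>& es, int n, int mask,
                      int s, int t, bool (*pred)(int, int), int L)
{
    vector<char> vis(n + 1, 0);
    vector<int> stk(1, s);
    vis[s] = 1;
    while (!stk.empty()) {
        int p = stk.back(); stk.pop_back();
        if (p == t) return true;
        for (int i = 0; i < (int)es.size(); i++) {
            if (!(mask >> i & 1) || !pred(es[i].w, L)) continue;
            // vertices are numbered from 1, so 0 marks "not incident"
            int q = es[i].u == p ? es[i].v : es[i].v == p ? es[i].u : 0;
            if (q && !vis[q]) { vis[q] = 1; stk.push_back(q); }
        }
    }
    return false;
}
static bool lighter(int w, int L) { return w < L; }
static bool heavier(int w, int L) { return w > L; }
// Enumerate every subset of surviving edges; a subset is feasible when
// u and v are disconnected in both the light and the heavy subgraph,
// and its cost is the number of deleted edges.
int brute_force(const vector<E>& es, int n, int u, int v, int L)
{
    int m = (int)es.size(), best = m;
    for (int mask = 0; mask < (1 << m); mask++)
        if (!reachable(es, n, mask, u, v, lighter, L) &&
            !reachable(es, n, mask, u, v, heavier, L))
            best = min(best, m - __builtin_popcount(mask));
    return best;
}
On the sample, brute_force(vector<E>{{3, 2, 1}, {1, 2, 3}}, 3, 1, 2, 2) returns \(1\), matching the flow-based answer.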