This problem can actually be done with Greedy + binary search; I used maxflow + binary search instead, having carelessly overcomplicated it.
First discretize the intervals. Give each time segment a capacity equal to its length, add an edge with that capacity from each segment to every dish being served during it, cap each dish node at the assumed length of time every dish must be eaten, and finally send the flow to the sink, each dish's edge to the sink also having that assumed capacity. Then check whether the max flow equals the assumed time * the number of dishes.
Binary search on that per-dish time to obtain the longest total time spent eating the dishes.
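For reference, here is a minimal sketch (not part of my solution) of the Greedy + binary search route mentioned above: binary search the per-dish time t, and for each guess run an earliest-deadline-first (EDF) simulation over the same discretized segments. The names edf_feasible, dishes, and seg are illustrative only; the binary search wrapping it would be the same as the one in main() below.

#include <bits/stdc++.h>
using namespace std;

// Feasibility check for a guessed per-dish time t: every dish needs t units of
// eating inside its own interval [l, r], and only one dish can be eaten at a time.
// dishes = (l, r) pairs; seg = sorted segment boundaries (including 0 and the horizon).
bool edf_feasible(long long t, vector<pair<long long, long long>> dishes,
                  const vector<long long> &seg) {
    if (t == 0) return true;
    sort(dishes.begin(), dishes.end()); // sort by release time l
    int n = dishes.size(), idx = 0;
    vector<long long> need(n, t);
    // min-heap of (deadline r, dish index) over dishes already released but unfinished
    priority_queue<pair<long long, int>, vector<pair<long long, int>>, greater<>> pq;
    for (size_t s = 0; s + 1 < seg.size(); ++s) {
        long long a = seg[s], avail = seg[s + 1] - a;
        while (idx < n && dishes[idx].first <= a) { pq.push({dishes[idx].second, idx}); ++idx; }
        while (avail > 0 && !pq.empty()) {
            pair<long long, int> top = pq.top();
            if (top.first <= a) return false;             // earliest deadline already missed
            long long use = min(avail, need[top.second]); // no deadline lies strictly inside a segment
            need[top.second] -= use;
            avail -= use;
            if (need[top.second] == 0) pq.pop();
        }
    }
    for (long long x : need) if (x > 0) return false;     // some dish never got enough time
    return true;
}

EDF is the natural greedy here because, within any segment between two consecutive boundaries, every available dish stays available for the whole segment.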
#pragma GCC optimize ("O3")
#pragma GCC target ("avx2")
#include <bits/stdc++.h>
#define PB push_back
#define MAXN 10310
#define BOARD 10020
#define FAREST 10000
#define NSIZE 110
using namespace std;
typedef struct {
    int to, cap, rev; // edge: endpoint, residual capacity, index of the reverse edge
} NODE;
typedef pair<int, int> PII;
typedef vector< NODE > Net;
typedef vector< PII > G;
const int S=MAXN-2, T=MAXN-1;
int N;
Net graph[MAXN], base[MAXN];
G data;
vector<int> seg;
int level[MAXN];
int iter[MAXN];
void bfs(int s) { // Dinic's BFS: distance labels over the residual graph
    memset(level, -1, sizeof(level));
    queue<int> que;
    level[s] = 0;
    que.push(s);
    while (!que.empty()) {
        int v = que.front(); que.pop();
        for (int i = 0; i < graph[v].size(); ++i) {
            NODE &e = graph[v][i];
            if (e.cap > 0 && level[e.to] < 0) {
                level[e.to] = level[v] + 1;
                que.push(e.to);
            }
        }
    }
}
int dfs(int v, int t, int f) { // Dinic's DFS: push up to f units along the level graph
    if (v == t) return f;
    for (int &i = iter[v]; i < graph[v].size(); ++i) {
        NODE &e = graph[v][i];
        if (e.cap > 0 && level[v] < level[e.to]) {
            int d = dfs(e.to, t, min(f, e.cap));
            if (d > 0) {
                e.cap -= d;
                graph[e.to][e.rev].cap += d;
                return d;
            }
        }
    }
    return 0;
}
int max_flow()
{
    int flow = 0;
    while (1) {
        bfs(S);
        if (level[T] < 0) return flow;
        memset(iter, 0x00, sizeof(iter));
        int f;
        while ((f = dfs(S, T, 1e9 + 1)) > 0) {
            flow += f;
        }
    }
    return 0;
}
void add_edge(Net *net, int from, int to, int cap) {
    net[from].PB((NODE){to, cap, (int)net[to].size()});
    net[to].PB((NODE){from, 0, (int)net[from].size() - 1}); // reverse edge with zero capacity
}
void build() { // build the invariant part of the network
    // S=MAXN-2, T=MAXN-1
    for (int i = 1; i < seg.size(); ++i) {
        add_edge(base, S, i - 1, seg[i] - seg[i - 1]); // S -> segment i-1, capacity = segment length
    }
    // the discretized segment nodes occupy indices [0, seg.size()-1)
}
void rebuild(int target) {
    // S=MAXN-2, T=MAXN-1
    #define NEWNODE1(i) (seg.size()+(i))
    #define NEWNODE2(i) (seg.size()+data.size()+(i))
    for (int i = 0; i < MAXN; ++i) {
        graph[i].clear();   // destroy the previous graph
        graph[i] = base[i]; // restore the invariant part (copy assignment)
    }
    for (int i = 0; i < data.size(); ++i) {
        const int &l = data[i].first;
        const int &r = data[i].second;
        int left  = lower_bound(seg.begin(), seg.end(), l) - seg.begin();
        int right = lower_bound(seg.begin(), seg.end(), r) - seg.begin();
        for (int j = left; j < right; ++j) {
            add_edge(graph, j, NEWNODE1(i), seg[j+1] - seg[j]); // segments -> dish i
        }
        add_edge(graph, NEWNODE1(i), NEWNODE2(i), target); // limits the flow of each dish to "target"
        add_edge(graph, NEWNODE2(i), T, target);           // dish -> gourmet (sink)
    }
    #undef NEWNODE1
    #undef NEWNODE2
}
inline bool checker(int d) {
    rebuild(d);
    int res = max_flow();
    return d * N == res; // feasible iff every dish receives exactly d units of flow
}
int main(void) {
    set<int> s;
    scanf("%d", &N);
    for (int i = 0; i < N; ++i) {
        int l, r; scanf("%d%d", &l, &r);
        if (l > r) swap(l, r);
        data.push_back(make_pair(l, r));
        s.insert(l); s.insert(r);
    }
    s.insert(0);      // 0 is always the first boundary
    s.insert(FAREST); // time horizon
    seg.reserve(s.size());
    for (set<int>::iterator v = s.begin(); v != s.end(); ++v) seg.push_back(*v);
    s.clear();
    //for (const auto &v : seg) cout << v << endl;
    build(); // build the invariant part of the graph
    // binary search on the per-dish time
    int l = 0, r = 1.5e4;
    while (r - l > 1) {
        int m = (l + r) >> 1;
        if (checker(m)) l = m;
        else r = m;
    }
    printf("%d\n", l * N);
    return 0;
}
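A small hypothetical sanity check of the input format read by main(): given

2
0 10
0 10

both dishes share the single segment [0, 10], so checker(m) passes for m <= 5 and fails at m = 6 (12 units of demand against 10 units of available time); the binary search finishes with l = 5 and the program prints 10.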