Anti LIS
Time Limit: 2000/1000 MS (Java/Others) Memory Limit: 32768/32768 K (Java/Others)Total Submission(s): 607 Accepted Submission(s): 157
Problem Description
Haven't you heard about Lost?
Having written an article named <Summaries of ALL Algorithms>, Lost is good at solving algorithm problems. One day, GXX asked Lost to work out the Longest Increasing Subsequence (for short, LIS) of a given sequence {A_1, A_2, ..., A_N}. Knowing this problem well, Lost simply copied a program from his article and solved the problem in seconds, so GXX became frustrated. She wanted to trick Lost by removing some elements from the original sequence to make Lost's answer go wrong. For convenience, she would like to remove the least number of elements.
Input
The beginning of the input is an integer T (T <= 10), which is the number of test cases. T cases follow. The first line of each test case is an integer N (1 <= N <= 1,000), which denotes the length of the sequence. The second line contains N integers A_1, A_2, ..., A_N, which denote the given sequence.
Output
For each test case, print a line contains a single integer which is the minimum number of the removed elements.
思路:
把能形成LIS的点建边,求最小割即是所求的答案。
代码:
#include<bits/stdc++.h>
using namespace std;
const int maxn=2505;        // 2*N + 2 nodes: in/out copy per element (N <= 1000) plus source/sink
const int st=0;             // source node id
const int en=2504;          // sink node id
const int inf=1e9;          // "infinite" flow limit for the augmenting DFS
int a[maxn],dp[maxn];       // input sequence; dp[i] = LIS length ending at position i

// One directed edge of the residual network used by Dinic's algorithm.
struct edge
{
int to;   // head vertex of the edge
int cap;  // remaining (residual) capacity
int rev;  // index of the paired reverse edge inside G[to]
};
std::vector<edge>G[maxn];   // adjacency lists of the flow network
int level[maxn],iter[maxn]; // BFS level labels / current-arc pointers for the DFS

// Insert the forward edge from->to with capacity cap together with its
// zero-capacity reverse edge (needed so flow can be cancelled later).
// Fix: the original used GNU compound literals `(edge){...}` with a
// size_t -> int narrowing conversion inside a braced initializer, which
// is ill-formed in standard C++; the explicit casts make it portable.
void add_edge(int from,int to,int cap)
{
G[from].push_back(edge{to,cap,(int)G[to].size()});
G[to].push_back(edge{from,0,(int)G[from].size()-1});
}
void bfs(int s)
{
memset(level,-1,sizeof(level));
queue<int>P;
level[s]=0;P.push(s);
while(!P.empty())
{
int v=P.front();P.pop();
for(int i=0;i<G[v].size();i++)
{
edge e=G[v][i];
if(e.cap>0&&level[e.to]<0)
{
level[e.to]=level[v]+1;
P.push(e.to);
}
}
}
}
// Dinic phase 2: try to push up to f units of flow from v to t along a
// path whose levels strictly increase. Returns the amount pushed, or 0
// if no augmenting path exists from v. The reference into iter[v] is
// the current-arc optimisation: arcs already proven useless in this
// phase are never re-examined.
int dfs(int v,int t,int f)
{
if(v==t)return f;
for(int &i=iter[v];i<(int)G[v].size();i++)
{
edge &e=G[v][i];
// Only follow unsaturated edges that move one level deeper.
if(e.cap<=0||level[e.to]<=level[v])continue;
int pushed=dfs(e.to,t,std::min(f,e.cap));
if(pushed<=0)continue;
// Commit the flow: shrink the forward edge, grow its reverse twin.
e.cap-=pushed;
G[e.to][e.rev].cap+=pushed;
return pushed;
}
return 0;
}
// Dinic's algorithm: alternate between rebuilding the level graph (bfs)
// and saturating it with augmenting paths (dfs) until the sink becomes
// unreachable; the accumulated flow is then maximum, and by max-flow /
// min-cut duality it equals the minimum cut this problem needs.
int max_flow(int s,int t)
{
int total=0;
for(;;)
{
bfs(s);
if(level[t]<0)break; // sink unreachable: no more augmenting paths
memset(iter,0,sizeof(iter)); // restart current-arc pointers for this phase
int pushed;
while((pushed=dfs(s,t,inf))>0)total+=pushed;
}
return total;
}
// To make Lost's LIS answer wrong, GXX must delete a minimum set of
// elements that intersects EVERY longest increasing subsequence. The
// elements that can participate in some LIS form a layered DAG (layer =
// dp value); a minimum vertex cut of that DAG is the answer. Each
// element i is split into in-node i and out-node i+n joined by a
// capacity-1 edge, so the s-t max flow equals that vertex cut.
int main()
{
int T;
scanf("%d",&T);
while(T--)
{
int n;
scanf("%d",&n);
for(int i=0;i<maxn;i++)G[i].clear();
for(int i=1;i<=n;i++)scanf("%d",&a[i]);

// Classic O(n^2) DP: dp[i] = length of the longest increasing
// subsequence ending exactly at i; ma = overall LIS length.
std::fill(dp,dp+maxn,1);
int ma=1;
for(int i=2;i<=n;i++)
{
for(int j=1;j<i;j++)
if(a[j]<a[i]&&dp[j]+1>dp[i])dp[i]=dp[j]+1;
if(dp[i]>ma)ma=dp[i];
}

// Build the flow network over elements that can appear in an LIS.
for(int i=1;i<=n;i++)
{
add_edge(i,i+n,1);                  // vertex-split edge: cutting it deletes element i
if(dp[i]==1)add_edge(st,i,1);       // possible first element of an LIS
if(dp[i]==ma)add_edge(i+n,en,1);    // possible last element of an LIS
for(int j=i+1;j<=n;j++)
if(a[i]<a[j]&&dp[j]==dp[i]+1)       // j can directly follow i in an LIS
add_edge(i+n,j,1);
}
printf("%d\n",max_flow(st,en));
}
return 0;
}
本文介绍了一种利用图论中的最小割算法解决最长递增子序列(LIS)问题的方法。通过构建特定图结构并求解最小割,可以找到移除最少元素使原LIS算法失效的方案。代码实现采用C++,详细展示了如何从输入序列构造图,并调用最大流算法求解最小割。
1761

被折叠的 条评论
为什么被折叠?



