Removed Interval
Time Limit: 4000/2000 MS (Java/Others) Memory Limit: 65536/65536 K (Java/Others)
Total Submission(s): 1265 Accepted Submission(s): 426
Problem Description
Given a sequence of numbers A = a_1, a_2, …, a_N,
a subsequence b_1, b_2, …, b_k of A is
referred to as increasing if b_1 < b_2 < … < b_k.
LY has just learned how to find the longest increasing subsequence (LIS).
Now he has to select L consecutive numbers and remove them from A for some mysterious reason. He can choose an arbitrary starting position for the selected interval, so that the length of the LIS of the remaining numbers is maximized. Can you help him with this problem?
Input
The first line of input contains a number T indicating
the number of test cases (T≤100).
For each test case, the first line consists of two numbers N and L as described above (1 ≤ N ≤ 100000, 0 ≤ L ≤ N). The second line consists of N integers indicating the sequence. The absolute value of the numbers is no greater than 10^9.
The sum of N over all test cases will not exceed 500000.
Output
For each test case, output a single line consisting of “Case #X: Y”. X is
the test case number starting from 1. Y is
the maximum length of LIS after removing the interval.
Sample Input
2
5 2
1 2 3 4 5
5 3
5 4 3 2 1
Sample Output
Case #1: 3
Case #2: 1
题意:给定一个N个元素的序列,要求任选起点位置去掉一个长度L的连续区间,问剩下序列最大的LIS。
思路一:设置dp1[i]为以a[i]开头的LIS,dp2[i]为以a[i]结尾的LIS。我们需要维护的信息是 add + dp1[i] - 1,add表示去掉区间[i-L-1, i-1]且以a[i]结尾的LIS。
dp1[i]和dp2[i]正反跑一次O(nlogn)算法就求出来了,至于add可以考虑用一棵线段树去维护dp2[],映射id后,将dp2[id]更新到当前id上,找区间[1, i-id-1]最大值。边界状态没有维护到[N-L+1, N]区间,最后加个特判就好了。
AC代码:
#include <iostream>
#include <cstdio>
#include <cstring>
#include <cmath>
#include <cstdlib>
#include <algorithm>
#include <queue>
#include <stack>
#include <map>
#include <set>
#include <vector>
#include <string>
#define INF 0x3f3f3f3f
#define eps 1e-8
#define MAXN (100000+10)
#define MAXM (200000+10)
#define Ri(a) scanf("%d", &a)
#define Rl(a) scanf("%lld", &a)
#define Rf(a) scanf("%lf", &a)
#define Rs(a) scanf("%s", a)
#define Pi(a) printf("%d\n", (a))
#define Pf(a) printf("%.2lf\n", (a))
#define Pl(a) printf("%lld\n", (a))
#define Ps(a) printf("%s\n", (a))
#define W(a) while(a--)
#define CLR(a, b) memset(a, (b), sizeof(a))
#define MOD 1000000007
#define LL long long
#define lson o<<1, l, mid
#define rson o<<1|1, mid+1, r
#define ll o<<1
#define rr o<<1|1
#define PI acos(-1.0)
using namespace std;
// One input element: its value and its original 1-based position in the sequence.
struct Node{
int val, id;
};
Node num[MAXN]; // the sequence (1-based); later sorted by value in main()
bool cmp(Node a, Node b){
if(a.val != b.val)
return a.val < b.val;
else
return a.id > b.id;
}
// Segment-tree node: covers index range [l, r]; Max caches the largest value
// stored anywhere in that range (main() inserts dp2 values point-wise).
struct Tree{
int l, r, Max;
};
Tree tree[MAXN<<2]; // 4*N nodes suffice for a segment tree over positions 1..N
// Recompute node o's cached maximum from its two children.
void PushUp(int o){
    int lc = o << 1;       // left child
    int rc = lc | 1;       // right child
    tree[o].Max = max(tree[lc].Max, tree[rc].Max);
}
// Initialize the subtree rooted at node o to cover [l, r], with every
// cached maximum reset to 0 (no values inserted yet).
void Build(int o, int l, int r)
{
    tree[o].l = l;
    tree[o].r = r;
    tree[o].Max = 0;
    if (l == r)
        return;                       // leaf: nothing below to build
    int mid = (l + r) >> 1;
    Build(o << 1, l, mid);            // left half
    Build((o << 1) | 1, mid + 1, r);  // right half
}
// Point assignment: write value v at leaf `pos`, then refresh the cached
// maxima of all ancestors on the way back up.
void Update(int o, int pos, int v)
{
    if (tree[o].l == tree[o].r)
    {
        tree[o].Max = v;   // reached the leaf for `pos`
        return;
    }
    int mid = (tree[o].l + tree[o].r) >> 1;
    int child = (pos <= mid) ? (o << 1) : ((o << 1) | 1);
    Update(child, pos, v);
    PushUp(o);
}
// Range-maximum query over [L, R]; assumes [L, R] lies inside node o's range.
int Query(int o, int L, int R)
{
    if (L == tree[o].l && R == tree[o].r)
        return tree[o].Max;                    // exact cover: cached answer
    int mid = (tree[o].l + tree[o].r) >> 1;
    if (R <= mid)
        return Query(o << 1, L, R);            // entirely in the left child
    if (L > mid)
        return Query((o << 1) | 1, L, R);      // entirely in the right child
    // Straddles the midpoint: combine both halves.
    int best_left  = Query(o << 1, L, mid);
    int best_right = Query((o << 1) | 1, mid + 1, R);
    return max(best_left, best_right);
}
int dp1[MAXN], dp2[MAXN], g[MAXN]; // dp1[i]: LIS starting at i; dp2[i]: LIS ending at i; g: "smallest tail" array
// For every element i treated as the FIRST kept LIS element after the removed
// window [i-L, i-1], combine dp1[i] with the best LIS ending strictly before
// the window on a smaller value.  Elements are processed in increasing value
// order (ties: larger index first), so the segment tree only ever contains
// strictly smaller values; a range-max over positions [1, i-L-1] then yields
// the best prefix part.
// The one configuration this loop cannot see is a LIS that lies entirely
// inside the prefix [1, N-L] with the window [N-L+1, N] removed.
// BUG FIX: the original handled that case with dp2[N-L] only, missing a LIS
// that ends before position N-L (e.g. A = 1 2 3 0 0, L = 1: answer 3, the
// original printed 2).  We now take the max of dp2 over the whole prefix.
int main()
{
    int t, kcase = 1; Ri(t);
    W(t)
    {
        int N, L;
        Ri(N); Ri(L);
        for(int i = 1; i <= N; i++) Ri(num[i].val), g[i] = INF, num[i].id = i;
        // Backward pass on negated values: dp1[i] = LIS length starting at i.
        for(int i = N; i >= 1; i--)
        {
            int k = lower_bound(g+1, g+N+1, -num[i].val) - g;
            dp1[i] = k;
            g[dp1[i]] = min(g[dp1[i]], -num[i].val);
        }
        CLR(g, INF);
        // Forward pass: dp2[i] = LIS length ending at i.
        for(int i = 1; i <= N; i++)
        {
            int k = lower_bound(g+1, g+N+1, num[i].val) - g;
            dp2[i] = k;
            g[dp2[i]] = min(g[dp2[i]], num[i].val);
        }
        // Insert in value order so queries only see strictly smaller values.
        sort(num+1, num+N+1, cmp);
        int ans = 0; Build(1, 1, N);
        for(int i = 1; i <= N; i++)
        {
            Update(1, num[i].id, dp2[num[i].id]);
            int pos = num[i].id - L - 1;   // last prefix position usable before the window
            if(pos < 0) continue;          // window [id-L, id-1] would start before position 1
            int add = 1;                   // the kept element num[i].id itself
            if(pos > 0) add += Query(1, 1, pos);
            ans = max(add + dp1[num[i].id] - 1, ans);
        }
        // Window removed at the very end: best LIS fully inside [1, N-L].
        for(int j = 1; j <= N - L; j++) ans = max(ans, dp2[j]);
        printf("Case #%d: %d\n", kcase++, ans);
    }
    return 0;
}
思路二:LIS变形直接搞,在求a[i]的add的时候,在g[]数组里面只放入a[i-L-1]前面的信息,这样就忽略掉中间区间[i-L, i-1]。
AC代码:
#include <iostream>
#include <cstdio>
#include <cstring>
#include <cmath>
#include <cstdlib>
#include <algorithm>
#include <queue>
#include <stack>
#include <map>
#include <set>
#include <vector>
#include <string>
#define INF 0x3f3f3f3f
#define eps 1e-8
#define MAXN (100000+10)
#define MAXM (200000+10)
#define Ri(a) scanf("%d", &a)
#define Rl(a) scanf("%lld", &a)
#define Rf(a) scanf("%lf", &a)
#define Rs(a) scanf("%s", a)
#define Pi(a) printf("%d\n", (a))
#define Pf(a) printf("%.2lf\n", (a))
#define Pl(a) printf("%lld\n", (a))
#define Ps(a) printf("%s\n", (a))
#define W(a) while(a--)
#define CLR(a, b) memset(a, (b), sizeof(a))
#define MOD 1000000007
#define LL long long
#define lson o<<1, l, mid
#define rson o<<1|1, mid+1, r
#define ll o<<1
#define rr o<<1|1
#define PI acos(-1.0)
using namespace std;
// One input element: its value and its original 1-based position.
struct Node{
int val, id;
};
Node num[MAXN]; // the input sequence (1-based)
int dp1[MAXN], g[MAXN]; // dp1[i]: LIS length starting at i; g: "smallest tail" array
// Sweep i from left to right; for each i treat i as the first kept element
// after the removed window [i-L, i-1].  g holds the LIS tails of the prefix
// [1, i-L-1] only (positions inside or after the window are never inserted),
// so lower_bound gives 1 + the best prefix LIS on values < a[i], and the
// candidate is that plus dp1[i] - 1.
// BUG FIXES vs. the original:
//   (1) the candidate is only valid when the window actually fits, i.e.
//       i > L (the original produced 5 on the sample A = 1 2 3 4 5, L = 2,
//       where the answer is 3);
//   (2) the window-at-the-end case ([N-L+1, N] removed) needs the FULL LIS
//       of the prefix [1, N-L], not just a LIS ending exactly at N-L
//       (e.g. A = 1 2 3 0 0, L = 1: answer 3).
int main()
{
    int t, kcase = 1; Ri(t);
    W(t)
    {
        int N, L;
        Ri(N); Ri(L);
        for(int i = 1; i <= N; i++) Ri(num[i].val), g[i] = INF, num[i].id = i;
        // Backward pass on negated values: dp1[i] = LIS length starting at i.
        for(int i = N; i >= 1; i--)
        {
            int k = lower_bound(g+1, g+N+1, -num[i].val) - g;
            dp1[i] = k;
            g[dp1[i]] = min(g[dp1[i]], -num[i].val);
        }
        CLR(g, INF); int ans = 0;
        for(int i = 1; i <= N; i++)
        {
            int pos = i - L - 1;   // newest position allowed into the prefix structure
            if(pos > 0) {int k = lower_bound(g+1, g+N+1, num[pos].val) - g; g[k] = min(g[k], num[pos].val);}
            if(i > L)   // the removed window [i-L, i-1] must fit inside [1, N]
            {
                int k = lower_bound(g+1, g+N+1, num[i].val) - g;
                ans = max(ans, k + dp1[i] - 1);
            }
        }
        // Window removed at the very end: LIS of the whole prefix [1, N-L].
        if(N > L)
        {
            // Position N-L was never inserted in the loop (that would need i = N+L+1).
            int k = lower_bound(g+1, g+N+1, num[N-L].val) - g;
            g[k] = min(g[k], num[N-L].val);
            // Number of filled (non-INF) tails == LIS length of a[1..N-L].
            ans = max(ans, (int)(lower_bound(g+1, g+N+1, INF) - g) - 1);
        }
        printf("Case #%d: %d\n", kcase++, ans);
    }
    return 0;
}