A
定义一个合法序列:元素下标奇、偶相间。
题意:给定n个元素,有两个操作——修改第i个元素;查询区间[L, R]合法序列的最大和。
思路:线段树区间合并啦,不过返回结构体好使。
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <cmath>
#include <vector>
#include <cstring>
#include <queue>
#include <map>
#include <set>
#define CLR(a, b) memset(a, (b), sizeof(a))
#define ll o<<1
#define rr o<<1|1
#define M(l, r) (l + r) >> 1
using namespace std;
typedef long long LL;
const int MAXN = 1e5 + 1;
const int MAXM = 1e6 + 1;
const int INF = 1e9 + 10;
// Interval summary for problem A ("alternating index parity" sequences).
// Field xy = maximum sum of a valid subsequence of the interval whose first
// chosen index has parity x and whose last chosen index has parity y
// (o = odd, e = even). Impossible combinations hold the sentinel -1e15
// (set at the leaves in Build).
struct Ans {
LL oo, oe, ee, eo;
};
// Segment-tree node: the interval [l, r] it covers plus its merged summary.
struct Tree {
int l, r;
Ans ans;
};
// 1-based heap layout (children of o are o<<1 and o<<1|1), hence 4x size.
Tree tree[MAXN<<2];
void PushUp(int o) {
tree[o].ans.oo = max(tree[ll].ans.oe + tree[rr].ans.oo, tree[ll].ans.oo + tree[rr].ans.eo);
tree[o].ans.oo = max(tree[o].ans.oo, max(tree[ll].ans.oo, tree[rr].ans.oo));
tree[o].ans.oe = max(tree[ll].ans.oo + tree[rr].ans.ee, tree[ll].ans.oe + tree[rr].ans.oe);
tree[o].ans.oe = max(tree[o].ans.oe, max(tree[ll].ans.oe, tree[rr].ans.oe));
tree[o].ans.ee = max(tree[ll].ans.eo + tree[rr].ans.ee, tree[ll].ans.ee + tree[rr].ans.oe);
tree[o].ans.ee = max(tree[o].ans.ee, max(tree[ll].ans.ee, tree[rr].ans.ee));
tree[o].ans.eo = max(tree[ll].ans.eo + tree[rr].ans.eo, tree[ll].ans.ee + tree[rr].ans.oo);
tree[o].ans.eo = max(tree[o].ans.eo, max(tree[ll].ans.eo, tree[rr].ans.eo));
}
void Build(int o, int l, int r) {
tree[o].l = l; tree[o].r = r;
if(l == r) {
LL v; scanf("%lld", &v);
if(l & 1) {
tree[o].ans.oo = v;
tree[o].ans.oe = tree[o].ans.ee = tree[o].ans.eo = -1e15;
}
else {
tree[o].ans.ee = v;
tree[o].ans.oe = tree[o].ans.oo = tree[o].ans.eo = -1e15;
}
return ;
}
int mid = M(l, r);
Build(ll, l, mid); Build(rr, mid+1, r);
PushUp(o);
}
void Update(int o, int pos, LL v) {
if(tree[o].l == tree[o].r) {
if(tree[o].l & 1) tree[o].ans.oo = v;
else tree[o].ans.ee = v;
return ;
}
int mid = M(tree[o].l, tree[o].r);
if(pos <= mid) Update(ll, pos, v);
else Update(rr, pos, v);
PushUp(o);
}
// Merge two adjacent interval summaries (a = left part, b = right part).
// A cross-boundary sequence is valid only when the left part's last index
// parity differs from the right part's first index parity; either side may
// also be taken on its own.
// Params are now const references: this function is called on every level of
// the Query recursion, and passing the 32-byte struct by value copied it
// each time (clang-tidy performance-unnecessary-value-param). Rvalue
// arguments such as Query(...) results still bind, so callers are unchanged.
Ans Best(const Ans &a, const Ans &b) {
    Ans c;
    c.oo = max(max(a.oe + b.oo, a.oo + b.eo), max(a.oo, b.oo));
    c.oe = max(max(a.oo + b.ee, a.oe + b.oe), max(a.oe, b.oe));
    c.ee = max(max(a.eo + b.ee, a.ee + b.oe), max(a.ee, b.ee));
    c.eo = max(max(a.eo + b.eo, a.ee + b.oo), max(a.eo, b.eo));
    return c;
}
// Return the summary of [L, R] (assumes 1 <= L <= R <= n).
// Guard-clause form: exact match, fully-left, fully-right, or a merge of the
// two straddling halves.
Ans Query(int o, int L, int R) {
    if (tree[o].l == L && tree[o].r == R)
        return tree[o].ans;
    int mid = M(tree[o].l, tree[o].r);
    if (R <= mid)
        return Query(ll, L, R);
    if (L > mid)
        return Query(rr, L, R);
    return Best(Query(ll, L, mid), Query(rr, mid + 1, R));
}
int main()
{
int t; scanf("%d", &t);
while(t--) {
int n, m; scanf("%d%d", &n, &m);
Build(1, 1, n);
while(m--) {
int op, x, y; scanf("%d%d%d", &op, &x, &y);
if(op == 0) {
Ans T = Query(1, x, y);
printf("%lld\n", max(max(max(T.oo, T.oe), T.ee), T.eo));
}
else {
Update(1, x, y);
}
}
}
return 0;
}
B
定义:f[i]是i的质因子个数。
题意:给定1-n共n个元素和m次查询,每次查询区间[L, R]里面最大的 gcd(f[i], f[j]),其中 L<=i,j<=R。
思路:发现n的限制下,f[]值最大为7。那样我们统计一下前i个数里面f值为j出现多少次即可。
这样时间复杂度是 O(7∗7∗T)
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <cmath>
#include <vector>
#include <cstring>
#include <queue>
#include <map>
#include <set>
#define CLR(a, b) memset(a, (b), sizeof(a))
#define ll o<<1
#define rr o<<1|1
using namespace std;
typedef long long LL;
const int MAXN = 1e6 + 1;
const int MAXM = 1e5 + 1;
const int INF = 1e9 + 10;
// Greatest common divisor of a and b (iterative Euclidean algorithm).
int gcd(int a, int b) {
    while (b != 0) {
        int r = a % b;
        a = b;
        b = r;
    }
    return a;
}
// Sieve scratch ("is composite"); later reused per query, for indices 1..7
// only, as "this f-value occurs in [L, R]" flags.
bool vis[MAXN];
// g[i][j] for i < j: precomputed gcd of the f-values i and j.
int g[8][8];
// a[i]: number of distinct prime factors of i (the f function).
// The code assumes a[i] <= 7 for all i < MAXN (see the note above).
int a[MAXN];
// sum[i][j]: how many x in [1, i] have a[x] == j.
int sum[MAXN][8];
// Driver for problem B: precompute f (distinct prime-factor counts) with a
// sieve plus prefix counts per f-value, then answer each query in O(7*7).
int main()
{
    // a[q] <- number of distinct prime factors of q.
    for (int p = 2; p < MAXN; p++) {
        if (vis[p]) continue;            // p already marked composite
        a[p] = 1;                        // p itself is prime
        for (int q = 2 * p; q < MAXN; q += p) {
            vis[q] = true;
            a[q]++;
        }
    }
    // sum[i][j] <- count of x in [1, i] with a[x] == j (f-values fit in 1..7).
    for (int i = 1; i < MAXN; i++)
        for (int j = 1; j <= 7; j++)
            sum[i][j] = sum[i-1][j] + (a[i] == j ? 1 : 0);
    // Pairwise gcds of the 7 possible f-values, tabulated once.
    for (int i = 1; i <= 7; i++)
        for (int j = i + 1; j <= 7; j++)
            g[i][j] = gcd(i, j);
    int t;
    scanf("%d", &t);
    while (t--) {
        int L, R;
        scanf("%d%d", &L, &R);
        int ans = 1;
        for (int v = 1; v <= 7; v++) {
            int cnt = sum[R][v] - sum[L-1][v];
            vis[v] = (cnt > 0);          // reuse sieve array as presence flags
            if (cnt > 1)                 // value appears twice: gcd(v, v) = v
                ans = max(ans, v);
        }
        // Try every pair of distinct f-values present in the range.
        for (int i = 1; i <= 7; i++) {
            if (!vis[i]) continue;
            for (int j = i + 1; j <= 7; j++)
                if (vis[j]) ans = max(ans, g[i][j]);
        }
        printf("%d\n", ans);
    }
    return 0;
}
D
题意:R只能沿着主对角线涂,B只能沿着副对角线涂,R、B都涂的地方为G,其中每个R、B只被涂了一次,.表示未被涂。问最少需要涂多少次。
模拟即可。
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <cmath>
#include <vector>
#include <cstring>
#include <queue>
#include <map>
#include <set>
#define CLR(a, b) memset(a, (b), sizeof(a))
#define ll o<<1
#define rr o<<1|1
using namespace std;
typedef long long LL;
const int MAXN = 1e6 + 1;
const int MAXM = 1e5 + 1;
const int INF = 1e9 + 10;
// The grid; DFS blanks cells to '.' as strokes are consumed.
char str[60][60];
// Marks cells that were 'G' (painted by both colours): they need a red
// stroke now and must be restored to 'B' for the blue pass.
bool vis[60][60];
// Rows, columns, and the stroke count for the current test case.
int n, m, ans;
// Per-direction step. Brace elision fills row-major, i.e. {{1,-1},{1,1}}:
// go[0] = down-left (used for 'B'), go[1] = down-right (used for 'R').
int go[2][2] = {1, -1, 1, 1};
// True iff (x, y) lies inside the n x m grid.
bool judge(int x, int y) {
    if (x < 0 || x >= n) return false;
    return y >= 0 && y < m;
}
// Consume one diagonal stroke of colour op starting at (x, y), walking in
// direction d (0 = down-left, 1 = down-right) and blanking each cell; when
// the run ends, count the stroke.
void DFS(int x, int y, int d, char op) {
    str[x][y] = '.';
    const int nx = x + go[d][0];
    const int ny = y + go[d][1];
    if (!judge(nx, ny) || str[nx][ny] != op) {
        ans++;              // stroke ends here
        return;
    }
    DFS(nx, ny, d, op);
}
// Driver for problem D: count the minimum number of diagonal strokes.
// Red strokes run down-right, blue strokes run down-left; 'G' cells were
// painted by both and contribute to both passes.
int main()
{
    int t;
    scanf("%d", &t);
    while (t--) {
        scanf("%d", &n);
        for (int i = 0; i < n; i++) {
            scanf("%s", str[i]);
            m = strlen(str[i]);
        }
        CLR(vis, false);
        // Treat 'G' as 'R' for the red pass, remembering it for the blue one.
        for (int i = 0; i < n; i++)
            for (int j = 0; j < m; j++)
                if (str[i][j] == 'G') {
                    str[i][j] = 'R';
                    vis[i][j] = true;
                }
        ans = 0;
        // Red pass. The row-major scan reaches the topmost cell of each
        // down-right run first, so each run is consumed as one stroke.
        for (int i = 0; i < n; i++)
            for (int j = 0; j < m; j++)
                if (str[i][j] == 'R') DFS(i, j, 1, 'R');
        // Restore the cells that also need blue paint.
        for (int i = 0; i < n; i++)
            for (int j = 0; j < m; j++)
                if (vis[i][j]) str[i][j] = 'B';
        // Blue pass (down-left runs), same topmost-first argument.
        for (int i = 0; i < n; i++)
            for (int j = 0; j < m; j++)
                if (str[i][j] == 'B') DFS(i, j, 0, 'B');
        printf("%d\n", ans);
    }
    return 0;
}
题意:0-n线段树,给定一个节点所代表的区间[L, R]问你包含该节点的线段树是否存在,存在输出最小的n值。
DFS一下即可,显然到某个程度就break了,不确定就只好试试了。。。
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <cmath>
#include <vector>
#include <cstring>
#include <queue>
#include <map>
#include <set>
#define CLR(a, b) memset(a, (b), sizeof(a))
#define ll o<<1
#define rr o<<1|1
using namespace std;
typedef long long LL;
const int MAXN = 1e6 + 1;
const int MAXM = 1e5 + 1;
const int INF = 1e9 + 10;
// Smallest valid n (= right end of a root [0, n]) found so far; -1 = none.
LL ans = -1;
// Search upward from the node [L, R] toward a root of the form [0, n].
// With mid = (l + r) / 2 and children [l, mid], [mid+1, r], a node [L, R]
// can be:
//   the left child of  [L, 2R-L]       or [L, 2R+1-L]
//   the right child of [2(L-1)-R, R]   or [2(L-1)+1-R, R]
// (pl, pr) is the interval we just came from, used to prune a step that
// makes no progress; T caps the recursion depth (heuristic cutoff at 10 --
// deeper chains are assumed not to help; see the note in the prose above).
void DFS(LL L, LL R, LL pl, LL pr, int T) {
//cout << L << ' ' << R << endl;
if(L < 0 || L > R) return ;              // not a valid interval
if(pl == L && pr == R) return ;          // no progress since last step
if(ans != -1 && R > ans) return ;        // prune: cannot beat current best
if(L == 0) {
// Reached a root [0, R]; candidate answer n = R.
if(ans == -1) ans = R;
else {
ans = min(ans, R);
}
return ;
}
if(T > 10) {
return ;
}
DFS(L, 2 * R - L, L, R, T + 1); DFS(L, 2 * R + 1 - L, L, R, T + 1);
DFS((L - 1) * 2 - R, R, L, R, T + 1); DFS((L - 1) * 2 + 1 - R, R, L, R, T + 1);
}
// Driver: for each queried node [L, R], print the smallest n such that the
// segment tree on [0, n] contains it, or -1 if the DFS finds none.
int main()
{
    LL L, R;
    while (scanf("%lld%lld", &L, &R) != EOF) {
        if (L == 0 || L == R) {
            // Handled directly: the answer is R itself in both cases.
            printf("%lld\n", R);
            continue;
        }
        ans = -1;
        DFS(L, R, -1, -1, 0);
        printf("%lld\n", ans);
    }
    return 0;
}
K
签到题
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <cmath>
#include <vector>
#include <cstring>
#include <queue>
#include <map>
#include <set>
#define CLR(a, b) memset(a, (b), sizeof(a))
#define ll o<<1
#define rr o<<1|1
using namespace std;
typedef long long LL;
const int MAXN = 1e6 + 1;
const int MAXM = 1e5 + 1;
const int INF = 1e9 + 10;
// Directed adjacency: G[u] lists the children of u (edges are read u -> v).
vector<int> G[110];
// son[u]: size of u's subtree (including u); d[u]: in-degree, used to find
// the root (the unique node with d == 0).
int son[110], d[110];
// ans: number of nodes with exactly k strict descendants; k: queried count.
int ans, k;
void DFS(int u) {
son[u] = 1;
for(int i = 0; i < G[u].size(); i++) {
int v = G[u][i];
DFS(v);
son[u] += son[v];
}
ans += son[u] - 1 == k;
}
// Driver for problem K: read a rooted tree as n-1 directed parent->child
// edges, find the root (in-degree 0), and count nodes with exactly k
// strict descendants.
int main()
{
    int n;
    while (scanf("%d%d", &n, &k) != EOF) {
        for (int i = 1; i <= n; i++) {
            G[i].clear();
            d[i] = 0;
        }
        for (int i = 0; i < n - 1; i++) {
            int u, v;
            scanf("%d%d", &u, &v);
            G[u].push_back(v);
            d[v]++;                     // v gained a parent
        }
        // Fix: s was previously uninitialized; if the input were malformed
        // (no zero in-degree node) it was read without ever being written,
        // which is undefined behavior. Defaulting to node 1 keeps the
        // behavior identical for every well-formed tree.
        int s = 1;
        for (int i = 1; i <= n; i++) {
            if (d[i] == 0) {
                s = i;
                break;
            }
        }
        ans = 0;
        DFS(s);
        printf("%d\n", ans);
    }
    return 0;
}