Range query and range update with a segment tree and a Binary Indexed Tree (Fenwick tree)
POJ 3468
The segment tree version ends up longer, uses more memory, and runs slower than the BIT version... not entirely sure why, though the 4n-node array with four fields per node and the recursion are the obvious suspects compared with the BIT's two flat arrays and simple loops.
Segment tree
Range updates are handled with a lazy add mark (the tag field); a quick sample run is shown after the code.
#include <cstdio>
#include <iostream>
#include <cstring>
using namespace std;

#define sc(x) scanf("%d", &x)
#define scs(x) scanf("%s", x)
#define scc(x) scanf("%c", &x)

const int maxn = 100010;
typedef long long ll;

struct node{
    int l, r;      // segment [l, r] covered by this node
    ll tag, sum;   // pending add (lazy tag) and segment sum
}tree[maxn<<2];
int in[maxn];
ll sum;            // accumulator for the current query

// recompute a node's sum from its two children
void pushUp(int idx){
    tree[idx].sum = tree[idx<<1].sum + tree[idx<<1|1].sum;
}

void build(int l, int r, int idx){
    tree[idx].l = l;
    tree[idx].r = r;
    if(l == r){                 // leaf: take the initial value
        tree[idx].sum = in[l];
        return;
    }
    int mid = (l+r)>>1;
    build(l, mid, idx<<1);
    build(mid+1, r, idx<<1|1);
    pushUp(idx);
}

// push the pending add down to both children and clear it
void pushDown(int idx){
    int mid = (tree[idx].l+tree[idx].r)>>1;
    tree[idx<<1].sum += (mid - tree[idx].l+1)*tree[idx].tag;
    tree[idx<<1|1].sum += (tree[idx].r - mid)*tree[idx].tag;
    tree[idx<<1].tag += tree[idx].tag;
    tree[idx<<1|1].tag += tree[idx].tag;
    tree[idx].tag = 0;
}

// add the sum over [l, r] into the global accumulator
void query(int l, int r, int idx){
    if(tree[idx].l >= l && tree[idx].r <= r){   // node fully inside the query range
        sum += tree[idx].sum;
        return;
    }
    if(tree[idx].tag != 0)
        pushDown(idx);
    int mid = (tree[idx].l+tree[idx].r)>>1;
    if(r <= mid) query(l, r, idx<<1);
    else if(l > mid) query(l, r, idx<<1|1);
    else{
        query(l, r, idx<<1);
        query(l, r, idx<<1|1);
    }
}

// add val to every element of [l, r]
void updata(int l, int r, int val, int idx){
    if(l <= tree[idx].l && r >= tree[idx].r){   // node fully inside the update range
        tree[idx].sum += (ll)(tree[idx].r-tree[idx].l+1)*val;
        tree[idx].tag += val;
        return;
    }
    if(tree[idx].tag != 0)
        pushDown(idx);
    int mid = (tree[idx].l+tree[idx].r)>>1;
    if(r <= mid) updata(l, r, val, idx<<1);
    else if(l > mid) updata(l, r, val, idx<<1|1);
    else{
        updata(l, r, val, idx<<1);
        updata(l, r, val, idx<<1|1);
    }
    pushUp(idx);
}

int main(){
    int n, k;
    sc(n); sc(k);
    for(int i = 1; i <= n; i++){
        sc(in[i]);
    }
    build(1, n, 1);
    for(int i = 1; i <= k; i++){
        getchar();              // eat the newline before the command letter
        char s;
        scc(s);
        int l, r;
        sc(l); sc(r);
        if(s=='Q'){
            sum = 0;
            query(l, r, 1);
            printf("%lld\n", sum);
        }else{
            int val;
            sc(val);
            updata(l, r, val, 1);
        }
    }
    return 0;
}
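
A quick sanity check with POJ 3468-style input; the answers are easy to verify by hand (4, then 1+...+10 = 55, 2+3+4 = 9, and 2+6+7 = 15 after adding 3 to positions 3..6):

Input:
10 5
1 2 3 4 5 6 7 8 9 10
Q 4 4
Q 1 10
Q 2 4
C 3 6 3
Q 2 4

Output:
4
55
9
15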
Binary Indexed Tree (Fenwick tree)
Maintains the values as a difference array split across two BITs, so both range updates and range sums stay O(log n); the identity this relies on is spelled out below.
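Let d[i] = a[i] - a[i-1]. A range add of val on [x, y] becomes two point updates, d[x] += val and d[y+1] -= val, which is exactly what the update branch in main below does. The prefix sum is recovered from d via

    a[1] + ... + a[p] = sum_{j<=p} (p - j + 1) * d[j]
                      = p * (d[1] + ... + d[p]) - (0*d[1] + 1*d[2] + ... + (p-1)*d[p]),

so sum1 accumulates d[j], sum2 accumulates (j - 1) * d[j], and getsum(p) evaluates the right-hand side term by term.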
#include <cstdio>
#include <iostream>
#include <cstring>
using namespace std;
typedef long long ll;

const int MAXN = 100010;
int n;
// range update, range sum: sum1[i] holds d[i], sum2[i] holds (i-1)*d[i]
ll sum1[MAXN], sum2[MAXN], a[MAXN] = {0};

ll lowbit(ll x){
    return x&(-x);
}

// point update of the difference array, applied to both BITs
void updata(ll i, ll k){
    ll x = i;                   // keep the original index; x stays fixed while i climbs
    while(i <= n){
        sum1[i] += k;
        sum2[i] += k * (x-1);
        i += lowbit(i);
    }
}

// prefix sum a[1] + ... + a[i]
ll getsum(ll i){
    ll res = 0, x = i;
    while(i > 0){
        res += x * sum1[i] - sum2[i];
        i -= lowbit(i);
    }
    return res;
}

int main(){
    int k;
    scanf("%d%d", &n, &k);
    for(int i = 1; i <= n; i++){
        scanf("%lld", &a[i]);
        updata(i, a[i] - a[i-1]);       // insert the difference a[i] - a[i-1]
    }
    // 'Q x y' asks for the sum over [x, y]; 'C x y val' adds val to every element of [x, y]
    for(int i = 1; i <= k; i++){
        char s; int x, y;
        getchar();                      // eat the newline before the command letter
        scanf("%c %d %d", &s, &x, &y);
        if(s=='Q'){
            ll sum = getsum(y) - getsum(x-1);
            printf("%lld\n", sum);
        }else{
            int val;
            scanf("%d", &val);
            updata(x, val); updata(y+1, -val);   // range add as two point updates on d
        }
    }
    return 0;
}
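
For local testing, a brute-force reference like the sketch below can be diffed against either program on small random inputs. It is not part of either submission, just an O(n)-per-operation checker that reads the same format (the 100010 array bound simply mirrors the solutions above):

#include <cstdio>
typedef long long ll;

ll a[100010];            // plain array, updated and summed directly

int main(){
    int n, k;
    if(scanf("%d%d", &n, &k) != 2) return 0;
    for(int i = 1; i <= n; i++) scanf("%lld", &a[i]);
    while(k--){
        char s[4]; int x, y;
        scanf("%s%d%d", s, &x, &y);     // %s skips whitespace, so no getchar needed
        if(s[0] == 'Q'){
            ll res = 0;
            for(int i = x; i <= y; i++) res += a[i];   // direct summation
            printf("%lld\n", res);
        }else{
            int val;
            scanf("%d", &val);
            for(int i = x; i <= y; i++) a[i] += val;   // direct range add
        }
    }
    return 0;
}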