题目大意:
给定一棵根节点为1的树,有两种操作,第一种操作是将与根节点距离为L的节点权值全部加上val,第二个操作是查询以x为根节点的子树的权重。
思路:
思考后发现,以dfs序建立树状数组,方便查询,不方便修改,以bfs序建立树状数组,方便修改,不方便查询。
在计算子树权重的时候发现,如果我能算出 所有层 属于这棵子树的 点数*对应层需要加上的val,那么就得到了这棵树的总权重。但是显然暴力统计点数会超时,于是我们借用一个分块的想法,对于一层来说,如果这层的总点数小于块的大小,就暴力树状数组修改,如果大于块,就用一个数组记录一下修改的值,并且把这一层保存到一个large的vector里面,表示这些层我是没有计算到树状数组里的。
在查询的时候,先用dfs序弄出每一个节点的(l,r)区间,统计树状数组里的值,然后对large里面的层来说,由于我事先记录好了每个层节点的dfs序号,并且是按照从小到大的顺序的,对于每个large层来说,只要是大于等于序号 l ,小于等于序号 r 的节点都属于这棵子树,所以用upper_bound和lower_bound来计算就得到了点数,乘以每一层对应的val就可以了。
#include<cstdio>
#include<cstring>
#include<stdlib.h>
#include<algorithm>
#include<iostream>
#include<cmath>
#include<map>
#include<queue>
#include<vector>
#define CLR(a,b) memset(a,b,sizeof(a))
#define PI acos(-1)
using namespace std;
typedef long long ll;
const int inf=0x3f3f3f3f;
// Restored stripped literal: N <= 1e5 nodes, and a tree stores exactly
// n-1 directed edges, so one array bound covers nodes and edges alike.
const int maxn=100010;
struct edge{
int to,Next;    // destination node and next edge index in the adjacency chain
}e[maxn];
int n,m;
// head/tot: forward-star adjacency list; L[u]..R[u]: dfs-number interval of
// u's subtree; time: dfs clock; dfn[u]: dfs number assigned to node u.
// NOTE(review): the global `time` shadows ::time if <ctime> gets pulled in
// transitively — kept as-is because other functions reference it by name.
int tot,head[maxn],u,v,L[maxn],R[maxn],time,dfn[maxn];
ll val[maxn];   // val[d]: lazily accumulated addend for a "large" depth layer d
int limit;      // block threshold: layers with more nodes are handled lazily
ll c[maxn];     // Fenwick (binary indexed tree) array, indexed by dfs number
// Lowest set bit of x — the step size used by the Fenwick-tree walks.
// ~x + 1 is the two's-complement negation, so this equals x & (-x).
inline int lowbit(int x){
    return x & (~x + 1);
}
// Fenwick point update: add `val` at dfs position x.
// Climbs through the O(log n) BIT ancestors of x up to n.
inline void add(int x,ll val){
    for(; x <= n; x += lowbit(x))
        c[x] += val;
}
// Fenwick prefix sum over dfs positions 1..x.
// Restored stripped literals: the accumulator starts at 0 and the
// descent stops once the index reaches 0.
inline ll getsum(int x){
    ll ans = 0;
    // Each step removes the lowest set bit, visiting O(log n) BIT nodes.
    while(x > 0){
        ans += c[x];
        x -= lowbit(x);
    }
    return ans;
}
vector<int >large;//大的层
vector<int >pos[maxn];//每一层有哪些点
inline void init(){
CLR(head,-),tot=,time=;
large.clear();
CLR(c,),CLR(val,);
for(int i=;i<=n;i++){
pos[i].clear();
}
limit=; }
// Append directed edge u -> v to the forward-star adjacency list.
// tot is 1-based; chains are terminated by head[] == -1 (see init()).
inline void addv(int u,int v){
    e[++tot]={v,head[u]};
    head[u]=tot;
}

// Euler-tour dfs from u at depth `deep`: assigns each node its dfs number,
// records that number in its depth layer (pushed in increasing order — the
// binary searches in the query phase rely on this), and marks L[u]..R[u]
// as the dfs-number interval covering the whole subtree of u.
// Restored stripped literals: the -1 chain terminator and deep+1.
// NOTE(review): recursion depth can reach n (1e5) on a path-shaped tree —
// assumes the judge's stack limit tolerates that.
inline void dfs(int u,int deep){
    dfn[u]=++time;
    pos[deep].push_back(time);
    L[u]=time;
    for(int i=head[u];i!=-1;i=e[i].Next){
        int v=e[i].to;
        dfs(v,deep+1);  // each child sits one level deeper
    }
    R[u]=time;          // last dfs number inside u's subtree
}
int main(){
while(scanf("%d%d",&n,&m)!=EOF)
{
init();
for(int i=;i<n;i++)
{
scanf("%d%d",&u,&v);
addv(u,v);
}
dfs(,);
for(int i=;i<=n;i++)
{
if(pos[i].size()>limit)
{
large.push_back(i);
}
}
int dis,x,op;
ll y;
while(m--)
{
scanf("%d",&op);
if(op==)
{
scanf("%d%lld",&dis,&y);
if(pos[dis].size()<=limit)
{
for(int i=;i<pos[dis].size();i++)
{
add(pos[dis][i],y);
}
}
else{
val[dis]+=y;
}
}else{
scanf("%d",&x);
// printf("l:%d r:%d\n",L[x],R[x]);
ll ans=getsum(R[x])-getsum(L[x]-);
for(int i=;i<large.size();i++)
{
ans+=val[large[i]]*(upper_bound(pos[large[i]].begin(),pos[large[i]].end(),R[x])-lower_bound(pos[large[i]].begin(),pos[large[i]].end(),L[x]));
}
printf("%lld\n",ans);
}
} }
} /*
1 6
1 0 1
2 1
1 0 3
2 1
1 0 1
1 0 1
*/
2.0 s
512 MB
standard input
standard output
You are given a directed tree with N nodes numbered 1 to N and rooted at node 1. Each node initially contains 0 coins.
You have to handle a total of M operations:
- 1 L Y : Increase by Y the coins of all nodes which are at a distance L from root.
- 2 X : Report the sum of coins of all nodes in subtree rooted at node X.
First line contains N and M. Each of the next N - 1 lines contains u and v denoting directed edge from node numbered u to v.
Each of the next M lines contain queries of either Type 1 or 2.
For each query of Type 2, print the required sum.
Constraints
- 1 ≤ N ≤ 10^5
- 1 ≤ M ≤ 10^4
- 0 ≤ L ≤ Maximum height of tree
- 0 ≤ Y ≤ 10^9
- 1 ≤ X, u, v ≤ N
5 4
1 2
1 3
3 4
3 5
1 1 2
1 2 3
2 3
2 1
8
10
In first update nodes 2 and 3 are increased by 2 coins each.
In second update nodes 4 and 5 are increased by 3 each.