// NOTE(review): this file was pasted through a filter that stripped every
// angle-bracketed token — all #include targets and all template argument
// lists were missing, leaving the file uncompilable. They are restored
// below; the logic itself is transcribed statement-for-statement.
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <cmath>
#include <climits>
#include <ctime>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <queue>
#include <stack>
#include <map>
#include <set>
#include <algorithm>
#include <numeric>
#define rep(i,n) for(int (i)=0;(i)<(int)(n);++(i))
#define rer(i,l,u) for(int (i)=(int)(l);(i)<=(int)(u);++(i))
#define reu(i,l,u) for(int (i)=(int)(l);(i)<(int)(u);++(i))
#if defined(_MSC_VER) || __cplusplus > 199711L
#define aut(r,v) auto r = (v)
#else
#define aut(r,v) __typeof(v) r = (v)
#endif
#define each(it,o) for(aut(it, (o).begin()); it != (o).end(); ++ it)
#define all(o) (o).begin(), (o).end()
#define pb(x) push_back(x)
#define mp(x,y) make_pair((x),(y))
#define mset(m,v) memset(m,v,sizeof(m))
#define INF 0x3f3f3f3f
#define INFL 0x3f3f3f3f3f3f3f3fLL
using namespace std;
typedef vector<int> vi;
typedef pair<int, int> pii;
typedef vector<pair<int, int> > vpii;
typedef long long ll;
template<typename T, typename U> inline void amin(T &x, U y) { if(y < x) x = y; }
template<typename T, typename U> inline void amax(T &x, U y) { if(x < y) x = y; }

// Modular integer in Z/MOD; value stored as an unsigned in [0, MOD).
template<int MOD>
struct ModInt {
	static const int Mod = MOD;
	unsigned x;
	ModInt(): x(0) { }
	ModInt(signed sig) { int sigt = sig % MOD; if(sigt < 0) sigt += MOD; x = sigt; }
	ModInt(signed long long sig) { int sigt = sig % MOD; if(sigt < 0) sigt += MOD; x = sigt; }
	int get() const { return (int)x; }
	ModInt &operator+=(ModInt that) { if((x += that.x) >= MOD) x -= MOD; return *this; }
	ModInt &operator-=(ModInt that) { if((x += MOD - that.x) >= MOD) x -= MOD; return *this; }
	ModInt &operator*=(ModInt that) { x = (unsigned long long)x * that.x % MOD; return *this; }
	ModInt operator+(ModInt that) const { return ModInt(*this) += that; }
	ModInt operator-(ModInt that) const { return ModInt(*this) -= that; }
	ModInt operator*(ModInt that) const { return ModInt(*this) *= that; }
};
typedef ModInt<1000000007> mint;

// Heavy-light decomposition of a rooted tree into vertex-disjoint "heavy
// paths" ("colors"). Each vertex gets a color and an offset within its path.
struct HeavyLightDecomposition {
	vector<int> colors, positions;          //Vertex -> Color, Vertex -> Offset
	vector<int> lengths, parents, branches; //Color -> Int, Color -> Color, Color -> Offset
	vector<int> parentnodes, depths;        //Vertex -> Vertex, Vertex -> Int
	//Flat 1-D layout (instead of vector<vector>):
	//sortednodes[lefts[v], rights[v]) is exactly the subtree of v.
	vector<int> sortednodes, offsets;       //Index -> Vertex, Color -> Index
	vector<int> lefts, rights;              //Vertex -> Index

	struct BuildDFSState {
		int i, len, parent;
		BuildDFSState() { }
		BuildDFSState(int i_, int l, int p): i(i_), len(l), parent(p) { }
	};

	//g may contain both directions of each edge, or only parent->child edges.
	void build(const vector<vi> &g, int root) {
		int n = g.size();
		colors.assign(n, -1); positions.assign(n, -1);
		lengths.clear(); parents.clear(); branches.clear();
		parentnodes.assign(n, -1); depths.assign(n, -1);
		sortednodes.clear(); offsets.clear();
		lefts.assign(n, -1); rights.assign(n, -1);
		vector<int> subtreesizes;
		measure(g, root, subtreesizes);
		typedef BuildDFSState State;
		depths[root] = 0;
		vector<State> s;
		s.push_back(State(root, 0, -1));
		//Iterative DFS. State.parent encodes a tag:
		// -3: post-visit marker (record subtree right boundary),
		// -2: continue the current heavy path,
		// otherwise: start a new path whose parent color is State.parent.
		while(!s.empty()) {
			State t = s.back(); s.pop_back();
			int i = t.i, len = t.len;
			int index = sortednodes.size();
			int color = lengths.size();
			if(t.parent == -3) { rights[i] = index; continue; }
			if(t.parent != -2) {
				assert((int)parents.size() == color);
				parents.push_back(t.parent);
				branches.push_back(len);
				offsets.push_back(index);
				len = 0;
			}
			colors[i] = color;
			positions[i] = len;
			lefts[i] = index;
			sortednodes.push_back(i);
			//Pick the child with the largest subtree as the heavy child.
			int maxsize = -1, maxj = -1;
			each(j, g[i]) if(colors[*j] == -1) {
				if(maxsize < subtreesizes[*j]) {
					maxsize = subtreesizes[*j];
					maxj = *j;
				}
				parentnodes[*j] = i;
				depths[*j] = depths[i] + 1;
			}
			s.push_back(State(i, -1, -3));
			if(maxj == -1) {
				lengths.push_back(len + 1);
			}else {
				each(j, g[i]) if(colors[*j] == -1 && *j != maxj)
					s.push_back(State(*j, len, color));
				s.push_back(State(maxj, len + 1, -2));
			}
		}
	}

	void get(int v, int &c, int &p) const { c = colors[v]; p = positions[v]; }
	//Move (c, p) to the attachment point of c's parent path; false at the root path.
	bool go_up(int &c, int &p) const {
		p = branches[c]; c = parents[c];
		return c != -1;
	}
	inline const int *nodesBegin(int c) const { return &sortednodes[0] + offsets[c]; }
	inline const int *nodesEnd(int c) const {
		return &sortednodes[0] + (c+1 == (int)offsets.size() ? (int)sortednodes.size() : offsets[c+1]);
	}
private:
	//Iterative subtree-size computation. Marker values in out_subtreesizes:
	// -1 = unvisited, -2 = expanded but not yet summed.
	void measure(const vector<vi> &g, int root, vector<int> &out_subtreesizes) const {
		out_subtreesizes.assign(g.size(), -1);
		vector<int> s;
		s.push_back(root);
		while(!s.empty()) {
			int i = s.back(); s.pop_back();
			if(out_subtreesizes[i] == -2) {
				int sz = 1;  //renamed from 's' (shadowed the stack vector)
				each(j, g[i]) if(out_subtreesizes[*j] != -2) sz += out_subtreesizes[*j];
				out_subtreesizes[i] = sz;
			}else {
				s.push_back(i);
				each(j, g[i]) if(out_subtreesizes[*j] == -1) s.push_back(*j);
				out_subtreesizes[i] = -2;
			}
		}
	}
};

//Per-vertex value: val is the running value, coef the coefficient that a
//path-add is multiplied by before being applied to val.
struct Val {
	mint val, coef;
	explicit Val(): val(), coef() { }
	explicit Val(mint val_, mint coef_): val(val_), coef(coef_) { }
};
//Aggregated sum over a (sub)path: sum of values and sum of coefficients.
struct PathSum {
	mint sum, coefsum;
	PathSum(): sum(), coefsum() { }
	explicit PathSum(const Val &val): sum(val.val), coefsum(val.coef) { }
	PathSum &operator+=(const PathSum &that) { sum += that.sum; coefsum += that.coefsum; return *this; }
	PathSum operator+(const PathSum &that) const { return PathSum(*this) += that; }
	//Sum is orientation-independent, so reversing is the identity.
	PathSum reverse() const { return *this; }
};
//Lazy "add x to every vertex" tag; applied as val += add * coef.
struct PathAdd {
	mint add;
	PathAdd() { }
	explicit PathAdd(mint add_): add(add_) { }
	PathAdd &operator+=(const PathAdd &that) { add += that.add; return *this; }
	void addToVal(Val &val) const { val.val += add * val.coef; }
	void addToSum(PathSum &sum) const { sum.sum += add * sum.coefsum; }
};

//Node of a per-heavy-path binary search tree ("path tree") with lazy adds.
struct Node {
	Node *parent;
	Node *pathLeft, *pathRight;
	Val val;
	PathSum pathSum;  //sum over this node's whole subtree (pathAdd NOT applied)
	PathAdd pathAdd;  //pending lazy add for this node and its subtree
	Node(): parent(NULL), pathLeft(NULL), pathRight(NULL), val(), pathSum(), pathAdd() { }
	bool isPathRoot() const { return !parent; }
	//Subtree sum with the node's own pending add folded in; NULL -> identity.
	static PathSum getPathSum(const Node *p) {
		if(!p) return PathSum();
		PathSum pathSum = p->pathSum;
		p->pathAdd.addToSum(pathSum);
		return pathSum;
	}
	static void addToPath(Node *p, const PathAdd &add) { if(p != NULL) p->pathAdd += add; }
	PathSum getSingletonPathSumNoAdd() const { return PathSum(val); }
	//Push the pending add down to the children and apply it here.
	void propagate() {
		if(pathLeft != NULL) pathLeft->pathAdd += pathAdd;
		if(pathRight != NULL) pathRight->pathAdd += pathAdd;
		pathAdd.addToVal(val);
		pathAdd.addToSum(pathSum);
		pathAdd = PathAdd();
	}
	void update() { pathSum = getPathSum(pathLeft) + getSingletonPathSumNoAdd() + getPathSum(pathRight); }
	//Debug-only: bitwise comparison is OK here because Node is trivially
	//copyable (pointers + PODish members) and tmp starts as a byte copy.
	bool debugCheckUpdated() const {
		Node tmp = *this;
		tmp.update();
		return memcmp(this, &tmp, sizeof(Node)) == 0;
	}
};

//HLD where each heavy path carries a *biased* BST keyed by path offset, so
//path queries/updates cost O(log n) overall. Path operations only (no
//subtree operations).
struct BiasedHeavyLightDecompositionPathOnly {
	vector<Node> nodes;
	vector<int> pathRoots;  //for each node: root node of the path tree of its heavy path
	vector<int> subpathLeft, subpathRight;  //offset interval [left, right] of the subpath a path-tree node covers
	vector<int> globalOrder;
	HeavyLightDecomposition hld;

	void build(const vector<vi> &g, int root, const vector<Val> &initVal) {
		hld.build(g, root);
		int n = g.size();
		nodes.assign(n, Node());
		vector<int> subtreeSize(n, 1);
		for(int ix = n-1; ix > 0; -- ix) {
			int i = hld.sortednodes[ix], p = hld.parentnodes[i];
			subtreeSize[p] += subtreeSize[i];
		}
		//childrenSize[v] = size of v's subtree minus the heavy child's subtree;
		//used as the BST weight so deep/heavy vertices sit near the BST root.
		vector<int> childrenSize = subtreeSize;
		for(int ix = 1; ix < n; ++ ix) {
			int i = hld.sortednodes[ix], p = hld.parentnodes[i];
			if(hld.colors[i] == hld.colors[p])
				childrenSize[p] -= subtreeSize[i];
		}
		buildPathTrees(childrenSize);
		getGlobalOrder(root);
		subpathLeft.resize(n); subpathRight.resize(n);
		//Initialize values bottom-up (globalOrder is a pre-order, reversed here).
		for(int ix = n-1; ix >= 0; -- ix) {
			int i = globalOrder[ix];
			Node *a = &nodes[i];
			a->val = initVal[i];
			a->update();
			subpathLeft[i] = a->pathLeft == NULL ? hld.positions[i] : subpathLeft[getNodeIndex(a->pathLeft)];
			subpathRight[i] = a->pathRight == NULL ? hld.positions[i] : subpathRight[getNodeIndex(a->pathRight)];
		}
	}

	Val getVal(int x) {
		propagatePath(&nodes[x]);
		return nodes[x].val;
	}
	void setVal(int x, const Val &val) {
		propagatePath(&nodes[x]);
		nodes[x].val = val;
		updatePath(&nodes[x]);
	}

	//Sum over the tree path ancestor -> decendant.
	//ancestor must be an ancestor of decendant.
	PathSum sumDecendingPath(int ancestor, int decendant) {
		Node *a = &nodes[decendant];
		int ancestorColor = hld.colors[ancestor];
		PathSum sum;
		while(a != NULL && hld.colors[getNodeIndex(a)] != ancestorColor) {
			sum = sumHeavyPathFromHead(a) + sum;
			a = goUpToParentPath(a);
		}
		assert(a != NULL);
		return sumHeavyPath(&nodes[ancestor], a) + sum;
	}
	void addToDecendingPath(int ancestor, int decendant, const PathAdd &add) {
		Node *a = &nodes[decendant];
		int ancestorColor = hld.colors[ancestor];
		while(a != NULL && hld.colors[getNodeIndex(a)] != ancestorColor) {
			addToHeavyPathFromHead(a, add);
			a = goUpToParentPath(a);
		}
		assert(a != NULL);
		addToHeavyPath(&nodes[ancestor], a, add);
	}

	//Sum over the tree path x -> y (split at the LCA).
	PathSum sumPath(int x, int y) {
		int z = lowestCommonAncestor(x, y);
		PathSum sum = sumDecendingPath(z, x).reverse();
		if(y != z) {
			int child = findAncestorChild(z, y);
			assert(child != -1);
			sum += sumDecendingPath(child, y);
		}
		return sum;
	}
	//Add to every vertex on the tree path x -> y.
	void addToPath(int x, int y, const PathAdd &add) {
		int z = lowestCommonAncestor(x, y);
		addToDecendingPath(z, x, add);
		if(y != z) {
			int child = findAncestorChild(z, y);
			assert(child != -1);
			addToDecendingPath(child, y, add);
		}
	}

	//Kept public because it is useful on its own.
	int lowestCommonAncestor(int x, int y) const {
		int cx, px, cy, py;
		hld.get(x, cx, px);
		hld.get(y, cy, py);
		while(cx != cy) {
			if(hld.depths[*hld.nodesBegin(cx)] < hld.depths[*hld.nodesBegin(cy)])
				hld.go_up(cy, py);
			else
				hld.go_up(cx, px);
		}
		return hld.nodesBegin(cx)[min(px, py)];
	}

	//Returns the direct child of ancestor that is an ancestor of decendant,
	//or -1 if no such child exists.
	int findAncestorChild(int ancestor, int decendant) const {
		int ac, ap;
		int c, p;
		hld.get(ancestor, ac, ap);
		hld.get(decendant, c, p);
		int prevc = -1;
		while(c != ac) {
			prevc = c;
			if(!hld.go_up(c, p)) return -1;
		}
		if(prevc == -1 || ap != p) {
			//Same heavy path (or the branch hangs below ancestor's offset):
			//the child is the next vertex on ancestor's own path.
			if(ap >= p) return -1;
			else return hld.nodesBegin(ac)[ap + 1];
		}else {
			//The path to decendant branches off exactly at ancestor.
			return hld.nodesBegin(prevc)[0];
		}
	}
private:
	int getNodeIndex(const Node *a) const { return static_cast<int>(a - &nodes[0]); }
	//From a's heavy path, step to the attachment vertex on the parent path.
	Node *goUpToParentPath(const Node *a) {
		int c, p;
		hld.get(getNodeIndex(a), c, p);
		if(!hld.go_up(c, p)) return NULL;
		else return &nodes[hld.nodesBegin(c)[p]];
	}
	//Propagate pending adds top-down along the root->a chain of the path
	//tree. Uses pointer reversal: first flips the parent links from a up to
	//ceiling, then walks back down restoring them while calling propagate().
	void propagatePath(Node *a, const Node *ceiling = NULL) {
		if(a == ceiling) { a->propagate(); return; }
		Node *r = a, *q = a->parent;
		while(q != ceiling) {
			Node *p = q;
			q = p->parent;
			p->parent = r;
			r = p;
		}
		while(r != a) {
			Node *c = r->parent;
			r->parent = q;
			q = r;
			r->propagate();
			r = c;
		}
		a->propagate();
	}
	void updatePath(Node *a, const Node *ceiling = NULL) {
		while(a != ceiling) {
			a->update();
			a = a->parent;
		}
	}
	//Sum over offsets [0, pos(a)] of a's heavy path (prefix of the path).
	PathSum sumHeavyPathFromHead(Node *a) {
		propagatePath(a);
		PathSum sum;
		while(1) {
			sum = Node::getPathSum(a->pathLeft) + a->getSingletonPathSumNoAdd() + sum;
			while(a->parent != NULL && a->parent->pathLeft == a) a = a->parent;
			if(a->parent == NULL) break;
			a = a->parent;
		}
		return sum;
	}
	//Sum over offsets [pos(l), pos(r)] of one heavy path (l before r).
	PathSum sumHeavyPath(Node *l, Node *r) {
		const Node *lca = findLowestCommonAncestorOnPathTree(l, r);
		propagatePath(l);
		propagatePath(r, lca);
		assert(lca != NULL);
		PathSum leftSum, rightSum;
		while(l != lca) {
			leftSum = leftSum + l->getSingletonPathSumNoAdd() + Node::getPathSum(l->pathRight);
			while(l->parent != lca && l->parent->pathRight == l) l = l->parent;
			l = l->parent;
		}
		while(r != lca) {
			rightSum = Node::getPathSum(r->pathLeft) + r->getSingletonPathSumNoAdd() + rightSum;
			while(r->parent != lca && r->parent->pathLeft == r) r = r->parent;
			r = r->parent;
		}
		assert((l == lca || l == lca->pathLeft) && (r == lca || r == lca->pathRight));
		return leftSum + lca->getSingletonPathSumNoAdd() + rightSum;
	}
	//Lazy add on offsets [0, pos(a)]; relies on addToVal being linear, so no
	//propagatePath is needed first (original kept it commented out too).
	void addToHeavyPathFromHead(Node *a, const PathAdd &add) {
//		propagatePath(a);
		Node *org_a = a;
		while(1) {
			Node::addToPath(a->pathLeft, add);
			add.addToVal(a->val);
			while(a->parent != NULL && a->parent->pathLeft == a) a = a->parent;
			if(a->parent == NULL) break;
			a = a->parent;
		}
		updatePath(org_a);
	}
	//Lazy add on offsets [pos(l), pos(r)] of one heavy path.
	void addToHeavyPath(Node *l, Node *r, const PathAdd &add) {
//		propagatePath(l);
//		propagatePath(r);
		Node *org_l = l, *org_r = r;
		Node *lca = findLowestCommonAncestorOnPathTree(l, r);
		assert(lca != NULL);
		while(l != lca) {
			Node::addToPath(l->pathRight, add);
			add.addToVal(l->val);
			while(l->parent != lca && l->parent->pathRight == l) l = l->parent;
			l = l->parent;
		}
		while(r != lca) {
			Node::addToPath(r->pathLeft, add);
			add.addToVal(r->val);
			while(r->parent != lca && r->parent->pathLeft == r) r = r->parent;
			r = r->parent;
		}
		assert((l == lca || l == lca->pathLeft) && (r == lca || r == lca->pathRight));
		add.addToVal(lca->val);
		updatePath(org_l, lca);
		updatePath(org_r, lca);
		updatePath(lca);
	}
	//Assumes l and r belong to the same path tree.
	Node *findLowestCommonAncestorOnPathTree(Node *l, Node *r) const {
		int lPos = hld.positions[getNodeIndex(l)];
		int rPos = hld.positions[getNodeIndex(r)];
		if(lPos > rPos) { swap(l, r); swap(lPos, rPos); }
		Node *a = l;
		while(1) {
			if(rPos <= subpathRight[getNodeIndex(a)]) return a;
			a = a->parent;
		}
	}
	//Build one biased BST per heavy path, weighted by `sizes`.
	void buildPathTrees(const vector<int> &sizes) {
		vector<int> weights, childL, childR;
		pathRoots.resize(nodes.size());
		int C = hld.lengths.size();
		for(int c = 0; c < C; ++ c) {
			int len = hld.lengths[c];
			const int *path = hld.nodesBegin(c);
			weights.resize(len);
			for(int j = 0; j < len; ++ j) weights[j] = sizes[path[j]];
			int rootj = makeBiasedBinarySearchTree(weights, childL, childR);
			int rootNode = path[rootj];
			for(int j = 0; j < len; ++ j) pathRoots[path[j]] = rootNode;
			nodes[rootNode].parent = NULL;
			for(int j = 0; j < len; ++ j) {
				Node *a = &nodes[path[j]];
				Node *l = childL[j] == -1 ? NULL : &nodes[path[childL[j]]];
				Node *r = childR[j] == -1 ? NULL : &nodes[path[childR[j]]];
				if((a->pathLeft = l) != NULL) l->parent = a;
				if((a->pathRight = r) != NULL) r->parent = a;
			}
		}
	}
	//weights is destroyed (turned into prefix sums in place).
	int makeBiasedBinarySearchTree(vector<int> &weights, vector<int> &resL, vector<int> &resR) {
		int n = weights.size();
		weights.resize(n + 1);
		int sum = 0;
		for(int i = 0; i < n; ++ i) {
			int w = weights[i];
			weights[i] = sum;
			sum += w;
		}
		weights[n] = sum;
		resL.resize(n);
		resR.resize(n);
		return makeBiasedBinarySearchTreeRec(-1, 0, n, weights, resL, resR);
	}
	//A doubling ("galloping") search would bound the cost by log(size of the
	//smaller side), giving O(n) total; not done here. The plain binary search
	//is fine in practice since the weights are not adversarial.
	//p is unused (legacy parameter kept for interface stability).
	int makeBiasedBinarySearchTreeRec(int p, int i, int j, const vector<int> &prefixSums, vector<int> &resL, vector<int> &resR) {
		if(i == j) return -1;
		//Choose mid so that: prefixSums[mid+1] - prefixSums[i] >= prefixSums[j] - prefixSums[mid]
		//i.e. prefixSums[mid] + prefixSums[mid+1] >= prefixSums[i] + prefixSums[j]
		int mid;
		if(i + 1 == j) {
			mid = i;
		}else {
			int t = prefixSums[i] + prefixSums[j];
			int l = i, u = j-1;
			while(u - l > 0) {
				int m = (l + u) / 2;
				if(prefixSums[m] + prefixSums[m+1] >= t) u = m;
				else l = m + 1;
			}
			mid = u;
		}
		assert(mid < j);
		resL[mid] = makeBiasedBinarySearchTreeRec(mid * 2 + 0, i, mid, prefixSums, resL, resR);
		resR[mid] = makeBiasedBinarySearchTreeRec(mid * 2 + 1, mid + 1, j, prefixSums, resL, resR);
		return mid;
	}
	//Pre-order over every path tree. globalRoot is unused (all colors are
	//visited in order anyway); parameter kept for interface stability.
	void getGlobalOrder(int globalRoot) {
		globalOrder.clear();
		globalOrder.reserve(nodes.size());
		vector<Node*> stk;
		int C = hld.lengths.size();
		for(int c = 0; c < C; ++ c) {
			stk.push_back(&nodes[pathRoots[hld.nodesBegin(c)[0]]]);
			while(!stk.empty()) {
				const Node *a = stk.back(); stk.pop_back();
				if(a == NULL) continue;
				globalOrder.push_back(getNodeIndex(a));
				stk.push_back(a->pathLeft);
				stk.push_back(a->pathRight);
			}
		}
		assert(globalOrder.size() == nodes.size());
	}
};

//Brute-force reference: collect the vertices on the path i -> t into `path`
//(in order t, ..., i). Returns true iff t is reachable without revisiting p.
//The `r || ...` short-circuit stops recursing once the target was found.
bool naivegetpath(int i, int p, int t, const vector<vi> &g, vi &path) {
	bool r = false;
	if(i == t) {
		r = true;
	}else {
		each(j, g[i]) if(*j != p) r = r || naivegetpath(*j, i, t, g, path);
	}
	if(r) path.push_back(i);
	return r;
}
#ifdef MY_LOCAL_RUN #include "C:\Dropbox\backup\implements\Util\MyAssert.hpp" #undef assert #define assert my_assert #define TEST #endif int main() { int N; for(int iii = 0; ; ++ iii) { #ifndef TEST if(!~scanf("%d", &N)) break; #else if(iii % 100 == 0) cerr << iii << "\r", cerr.flush(); N=rand()%10+1; #endif vector S(N), C(N); rep(i, N) { #ifndef TEST scanf("%d", &S[i]); #else S[i]=rand()%100; #endif } rep(i, N) { #ifndef TEST scanf("%d", &C[i]); #else C[i]=rand()%100; #endif } vector initVals(N); rep(i, N) initVals[i] = Val(S[i], C[i]); vector g(N); rep(i, N-1) { int A, B; #ifndef TEST scanf("%d%d", &A, &B), -- A, -- B; #else A=i+1,B=rand()%(i+1); #endif g[A].push_back(B); g[B].push_back(A); } BiasedHeavyLightDecompositionPathOnly bhld; bhld.build(g, 0, initVals); #ifdef TEST vector naiveval(all(S)); #endif int Q; #ifndef TEST scanf("%d", &Q); #else Q=rand()%100+1; #endif rep(ii, Q) { int ty; #ifndef TEST scanf("%d", &ty); #else ty=rand()%2; #endif if(ty == 0) { int X, Y, Z; #ifndef TEST scanf("%d%d%d", &X, &Y, &Z), -- X, -- Y; #else X=rand()%N,Y=rand()%N,Z=rand()%100; #endif bhld.addToPath(X, Y, PathAdd(Z)); #ifdef TEST vi naivepath; naivegetpath(X, -1, Y, g, naivepath); each(j, naivepath) naiveval[*j] += mint(Z) * C[*j]; #endif }else { int X, Y; #ifndef TEST scanf("%d%d", &X, &Y), -- X, -- Y; #else X=rand()%N,Y=rand()%N; #endif PathSum pathSum = bhld.sumPath(X, Y); mint ans = pathSum.sum; #ifndef TEST printf("%d\n", ans.get()); #else vi naivepath; naivegetpath(X, -1, Y, g, naivepath); mint naivesum, naivecoefsum; each(j, naivepath) naivecoefsum += C[*j], naivesum += naiveval[*j]; if(ans.get() != naivesum.get()) cerr << ans.get() << " != " << naivesum.get() << endl; #endif } #ifdef TEST // rep(i, N) bhld.getVal(i); // rep(i, N) bhld.setVal(i, bhld.getVal(i)); rep(i, N) assert(bhld.nodes[i].debugCheckUpdated()); // rep(i, N) assert(bhld.getVal(i).val.x == naiveval[i].x); #endif } } return 0; }