Result

Problem: No.235 めぐるはめぐる (5)
User: antaanta
Submission time: 2015-08-26 00:08:39
Language: C++11 (gcc 11.4.0)
Result: AC
Execution time: 771 ms / 10,000 ms
Code length: 16,398 bytes
Compile time: 2,087 ms
Compile memory: 113,160 KB
Runtime memory: 40,048 KB
Last judged: 2023-09-25 18:25:54
Total judge time: 5,413 ms
Judge server ID (reference): judge13 / judge12

Test cases

Input        Result  Time    Memory
testcase_00  AC      771 ms  38,956 KB
testcase_01  AC      470 ms  40,048 KB
testcase_02  AC      653 ms  39,128 KB

Source code


#include <string>
#include <vector>
#include <algorithm>
#include <numeric>
#include <set>
#include <map>
#include <queue>
#include <iostream>
#include <sstream>
#include <cstdio>
#include <cmath>
#include <ctime>
#include <cstring>
#include <cctype>
#include <cassert>
#include <limits>
#include <functional>
#define rep(i,n) for(int (i)=0;(i)<(int)(n);++(i))
#define rer(i,l,u) for(int (i)=(int)(l);(i)<=(int)(u);++(i))
#define reu(i,l,u) for(int (i)=(int)(l);(i)<(int)(u);++(i))
#if defined(_MSC_VER) || __cplusplus > 199711L
#define aut(r,v) auto r = (v)
#else
#define aut(r,v) __typeof(v) r = (v)
#endif
#define each(it,o) for(aut(it, (o).begin()); it != (o).end(); ++ it)
#define all(o) (o).begin(), (o).end()
#define pb(x) push_back(x)
#define mp(x,y) make_pair((x),(y))
#define mset(m,v) memset(m,v,sizeof(m))
#define INF 0x3f3f3f3f
#define INFL 0x3f3f3f3f3f3f3f3fLL
using namespace std;
typedef vector<int> vi; typedef pair<int,int> pii; typedef vector<pair<int,int> > vpii; typedef long long ll;
template<typename T, typename U> inline void amin(T &x, U y) { if(y < x) x = y; }
template<typename T, typename U> inline void amax(T &x, U y) { if(x < y) x = y; }

#ifndef MY_LOCAL_RUN
#undef assert
#define assert(e) 
#endif

template<int MOD>
struct ModInt {
	static const int Mod = MOD;
	unsigned x;
	ModInt(): x(0) { }
	ModInt(signed sig) { int sigt = sig % MOD; if(sigt < 0) sigt += MOD; x = sigt; }
	ModInt(signed long long sig) { int sigt = sig % MOD; if(sigt < 0) sigt += MOD; x = sigt; }
	int get() const { return (int)x; }
	
	ModInt &operator+=(ModInt that) { if((x += that.x) >= MOD) x -= MOD; return *this; }
	ModInt &operator-=(ModInt that) { if((x += MOD - that.x) >= MOD) x -= MOD; return *this; }
	ModInt &operator*=(ModInt that) { x = (unsigned long long)x * that.x % MOD; return *this; }
	
	ModInt operator+(ModInt that) const { return ModInt(*this) += that; }
	ModInt operator-(ModInt that) const { return ModInt(*this) -= that; }
	ModInt operator*(ModInt that) const { return ModInt(*this) *= that; }
};
typedef ModInt<1000000007> mint;

struct HeavyLightDecomposition {
	vector<int> colors, positions;	//Vertex -> Color, Vertex -> Offset
	vector<int> lengths, parents, branches;	//Color -> Int, Color -> Color, Color -> Offset
	vector<int> parentnodes, depths;	//Vertex -> Vertex, Vertex -> Int
	//Used when you want one flat array instead of, say, a vector<FenwickTree> per path
	//sortednodes[lefts[v], rights[v]) is exactly the subtree of v
	vector<int> sortednodes, offsets;	//Index -> Vertex, Color -> Index
	vector<int> lefts, rights;	//Vertex -> Index

	struct BuildDFSState {
		int i, len, parent;
		BuildDFSState() { }
		BuildDFSState(int i_, int l, int p): i(i_), len(l), parent(p) { }
	};

	//g may contain both directions of every edge, or only parent-to-child edges
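	//In the iterative DFS below, State::parent doubles as a tag:
	//  >= 0 : first visit of a vertex that starts a new heavy path (value = parent color)
	//  -1   : the root, which also starts a new path
	//  -2   : first visit of the heavy child, continuing the current path
	//  -3   : post-visit marker, used only to record rights[i]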
	void build(const vector<vi> &g, int root) {
		int n = g.size();

		colors.assign(n, -1); positions.assign(n, -1);
		lengths.clear(); parents.clear(); branches.clear();
		parentnodes.assign(n, -1); depths.assign(n, -1);

		sortednodes.clear(); offsets.clear();
		lefts.assign(n, -1); rights.assign(n, -1);

		vector<int> subtreesizes;
		measure(g, root, subtreesizes);

		typedef BuildDFSState State;
		depths[root] = 0;
		vector<State> s;
		s.push_back(State(root, 0, -1));
		while(!s.empty()) {
			State t = s.back(); s.pop_back();
			int i = t.i, len = t.len;
			int index = sortednodes.size();
			int color = lengths.size();

			if(t.parent == -3) {
				rights[i] = index;
				continue;
			}

			if(t.parent != -2) {
				assert(parents.size() == color);
				parents.push_back(t.parent);
				branches.push_back(len);
				offsets.push_back(index);
				len = 0;
			}
			colors[i] = color;
			positions[i] = len;

			lefts[i] = index;
			sortednodes.push_back(i);

			int maxsize = -1, maxj = -1;
			each(j, g[i]) if(colors[*j] == -1) {
				if(maxsize < subtreesizes[*j]) {
					maxsize = subtreesizes[*j];
					maxj = *j;
				}
				parentnodes[*j] = i;
				depths[*j] = depths[i] + 1;
			}
			s.push_back(State(i, -1, -3));
			if(maxj == -1) {
				lengths.push_back(len + 1);
			}else {
				each(j, g[i]) if(colors[*j] == -1 && *j != maxj)
					s.push_back(State(*j, len, color));
				s.push_back(State(maxj, len + 1, -2));
			}
		}
	}
	
	void get(int v, int &c, int &p) const {
		c = colors[v]; p = positions[v];
	}
	bool go_up(int &c, int &p) const {
		p = branches[c]; c = parents[c];
		return c != -1;
	}

	inline const int *nodesBegin(int c) const { return &sortednodes[0] + offsets[c]; }
	inline const int *nodesEnd(int c) const { return &sortednodes[0] + (c+1 == offsets.size() ? sortednodes.size() : offsets[c+1]); }

private:
	void measure(const vector<vi> &g, int root, vector<int> &out_subtreesizes) const {
		out_subtreesizes.assign(g.size(), -1);
		vector<int> s;
		s.push_back(root);
		while(!s.empty()) {
			int i = s.back(); s.pop_back();
			if(out_subtreesizes[i] == -2) {
				int sz = 1;
				each(j, g[i]) if(out_subtreesizes[*j] != -2)
					sz += out_subtreesizes[*j];
				out_subtreesizes[i] = sz;
			}else {
				s.push_back(i);
				each(j, g[i]) if(out_subtreesizes[*j] == -1)
					s.push_back(*j);
				out_subtreesizes[i] = -2;
			}
		}
	}
};

struct Val {
	mint val, coef;
	explicit Val(): val(), coef() { }
	explicit Val(mint val_, mint coef_): val(val_), coef(coef_) { }
};

struct PathSum {
	mint sum, coefsum;

	PathSum(): sum(), coefsum() { }

	explicit PathSum(const Val &val): sum(val.val), coefsum(val.coef) { }

	PathSum &operator+=(const PathSum &that) {
		sum += that.sum;
		coefsum += that.coefsum;
		return *this;
	}
	PathSum operator+(const PathSum &that) const { return PathSum(*this) += that; }

	PathSum reverse() const {
		return *this;
	}
};

struct PathAdd {
	mint add;
	PathAdd() { }
	explicit PathAdd(mint add_): add(add_) { }
	PathAdd &operator+=(const PathAdd &that) { add += that.add; return *this; }
	void addToVal(Val &val) const { val.val += add * val.coef; }
	void addToSum(PathSum &sum) const { sum.sum += add * sum.coefsum; }
};
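
//Lazy-add algebra for this problem: Val holds a vertex's current value and its coefficient.
//A PathAdd of X means "add X * coef to every value in the range", so applied to a PathSum
//(sum of values, sum of coefficients) it becomes sum += X * coefsum, which is why both
//aggregates are carried together.
//
//Node (below) is one per vertex. Each heavy path is stored as a weight-biased binary search
//tree ("path tree") keyed by position along the path; pathLeft/pathRight/parent are the BST
//links, pathSum aggregates the whole BST subtree, and pathAdd is a pending lazy add for it.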

struct Node {
	Node *parent;
	Node *pathLeft, *pathRight;

	Val val;
	PathSum pathSum;
	PathAdd pathAdd;

	Node(): parent(NULL), pathLeft(NULL), pathRight(NULL),
		val(), pathSum(), pathAdd() { }

	bool isPathRoot() const { return !parent; }

	static PathSum getPathSum(const Node *p) {
		if(!p) return PathSum();
		PathSum pathSum = p->pathSum;
		p->pathAdd.addToSum(pathSum);
		return pathSum;
	}

	static mint getPathSum2(const Node *p) {
		if(!p) return mint();
		return p->pathSum.sum + p->pathAdd.add * p->pathSum.coefsum;
	}

	static void addToPath(Node *p, const PathAdd &add) {
		if(p != NULL)
			p->pathAdd += add;
	}

	PathSum getSingletonPathSum() const {
		return PathSum(val);
	}

	void propagate() {
		if(pathAdd.add.x != 0) {
			if(pathLeft != NULL) pathLeft->pathAdd += pathAdd;
			if(pathRight != NULL) pathRight->pathAdd += pathAdd;
			pathAdd.addToVal(val);
			pathAdd.addToSum(pathSum);
			pathAdd = PathAdd();
		}
	}

	void update() {
		pathSum = getPathSum(pathLeft) + getSingletonPathSum() + getPathSum(pathRight);
	}

	bool debugCheckUpdated() const {
		Node tmp = *this;
		tmp.update();
		return memcmp(this, &tmp, sizeof(Node)) == 0;
	}

};

struct BiasedHeavyLightDecompositionPathOnly {
	vector<Node> nodes;
	vector<int> pathRoots;	//for each node: the node that is the root of the path tree of its heavy path
	vector<int> subpathLeft, subpathRight;	//offset interval [left, right] of the subpath this node represents in its path tree
	vector<int> globalOrder;
	HeavyLightDecomposition hld;

	void build(const vector<vi> &g, int root, const vector<Val> &initVal) {
		hld.build(g, root);

		int n = g.size();
		nodes.assign(n, Node());

		vector<int> subtreeSize(n, 1);
		for(int ix = n-1; ix > 0; -- ix) {
			int i = hld.sortednodes[ix], p = hld.parentnodes[i];
			subtreeSize[p] += subtreeSize[i];
		}

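		//Weights for the biased path trees: childrenSize[v] = subtreeSize[v] minus the
		//subtree hanging below v on its own heavy path, i.e. 1 plus the sizes of v's
		//light subtrees. Biasing each path tree by these weights is what keeps the total
		//cost of a whole root-to-vertex walk near O(log n) rather than O(log^2 n).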
		vector<int> childrenSize = subtreeSize;
		for(int ix = 1; ix < n; ++ ix) {
			int i = hld.sortednodes[ix], p = hld.parentnodes[i];
			if(hld.colors[i] == hld.colors[p])
				childrenSize[p] -= subtreeSize[i];
		}

		buildPathTrees(childrenSize);

		getGlobalOrder(root);

		subpathLeft.resize(n);
		subpathRight.resize(n);
		for(int ix = n-1; ix >= 0; -- ix) {
			int i = globalOrder[ix];
			Node *a = &nodes[i];
			a->val = initVal[i];
			a->update();
			subpathLeft[i] = a->pathLeft == NULL ? hld.positions[i] : subpathLeft[getNodeIndex(a->pathLeft)];
			subpathRight[i] = a->pathRight == NULL ? hld.positions[i] : subpathRight[getNodeIndex(a->pathRight)];
		}
	}

	Val getVal(int x) {
		propagatePath(&nodes[x]);
		return nodes[x].val;
	}

	void setVal(int x, const Val &val) {
		propagatePath(&nodes[x]);
		nodes[x].val = val;
		updatePath(&nodes[x]);
	}

	//Sum of values on the path x -> y (inclusive): root-path sums of x and y minus
	//twice the root-path sum of their LCA, plus the LCA's own value added back once.
	mint sumPath(int x, int y) {
		int z = lowestCommonAncestor(x, y);
		mint sum;
		sum += sumPathFromRoot(x);
		sum += sumPathFromRoot(y);
		sum -= sumPathFromRoot(z) * 2;
		sum += nodes[z].val.val;
		return sum;
	}

	mint sumPathFromRoot(int x) {
		Node *a = &nodes[x];
		mint sum;
		while(a != NULL) {
			sum += sumHeavyPathFromHead(a);
			a = goUpToParentPath(a);
		}
		return sum;
	}

	//Add on the path x -> y (inclusive): same inclusion-exclusion as sumPath, with the
	//LCA compensated by a -2*add on its root path and one direct add to its value.
	void addToPath(int x, int y, const PathAdd &add) {
		int z = lowestCommonAncestor(x, y);
		addToPathFromRoot(x, add);
		addToPathFromRoot(y, add);
		addToPathFromRoot(z, PathAdd(add.add * -2));
		add.addToVal(nodes[z].val);
		updatePath(&nodes[z]);
	}

	void addToPathFromRoot(int x, const PathAdd &add) {
		Node *a = &nodes[x];
		while(a != NULL) {
			addToHeavyPathFromHead(a, add);
			a = goUpToParentPath(a);
		}
	}

	//used from outside, so it is kept here in the public section
	int lowestCommonAncestor(int x, int y) const {
		int cx, px, cy, py;
		hld.get(x, cx, px);
		hld.get(y, cy, py);
		while(cx != cy) {
			if(hld.depths[*hld.nodesBegin(cx)] < hld.depths[*hld.nodesBegin(cy)])
				hld.go_up(cy, py);
			else
				hld.go_up(cx, px);
		}
		return hld.nodesBegin(cx)[min(px, py)];
	}
private:
	int getNodeIndex(const Node *a) const {
		return static_cast<int>(a - &nodes[0]);
	}

	Node *goUpToParentPath(const Node *a) {
		int c, p;
		hld.get(getNodeIndex(a), c, p);
		if(!hld.go_up(c, p))
			return NULL;
		else
			return &nodes[hld.nodesBegin(c)[p]];
	}

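	//Push pending adds from the path-tree root down to a: the first loop reverses the
	//parent pointers along the chain a -> root, the second walks back down from the root,
	//restoring each parent pointer and calling propagate() top-down.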
	void propagatePath(Node *a) {
		Node *r = a, *q = a->parent;
		while(q != NULL) {
			Node *p = q;
			q = p->parent;
			p->parent = r;
			r = p;
		}
		while(r != a) {
			Node *c = r->parent;
			r->parent = q;
			q = r;
			r->propagate();
			r = c;
		}
		a->propagate();
	}

	void updatePath(Node *a) {
		while(a != NULL) {
			a->update();
			a = a->parent;
		}
	}

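	//Sum of val.val over the prefix of a's heavy path up to and including a's position:
	//a's left subtree and a itself, plus, for every path-tree ancestor that has a in its
	//right subtree, that ancestor's left subtree and its own value.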
	mint sumHeavyPathFromHead(Node *a) {
		propagatePath(a);
		mint sum;
		while(1) {
			sum += Node::getPathSum2(a->pathLeft);
			sum += a->val.val;
			while(a->parent != NULL && a->parent->pathLeft == a)
				a = a->parent;
			if(a->parent == NULL)
				break;
			a = a->parent;
		}
		return sum;
	}

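	//Same prefix as sumHeavyPathFromHead, but applying a lazy add instead of summing;
	//updatePath(orgA) then rebuilds the aggregates on the chain up to the path-tree root.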
	void addToHeavyPathFromHead(Node *a, const PathAdd &add) {
		Node *orgA = a;
//		propagatePath(a);
		while(1) {
			Node::addToPath(a->pathLeft, add);
			add.addToVal(a->val);
			while(a->parent != NULL && a->parent->pathLeft == a)
				a = a->parent;
			if(a->parent == NULL)
				break;
			a = a->parent;
		}
		updatePath(orgA);
	}

	void buildPathTrees(const vector<int> &sizes) {
		vector<int> weights, childL, childR;
		pathRoots.resize(nodes.size());

		int C = hld.lengths.size();
		for(int c = 0; c < C; ++ c) {
			int len = hld.lengths[c];
			const int *path = hld.nodesBegin(c);
			weights.resize(len);
			for(int j = 0; j < len; ++ j)
				weights[j] = sizes[path[j]];
			int rootj = makeBiasedBinarySearchTree(weights, childL, childR);
			int rootNode = path[rootj];
			for(int j = 0; j < len; ++ j)
				pathRoots[path[j]] = rootNode;

			nodes[rootNode].parent = NULL;
			for(int j = 0; j < len; ++ j) {
				Node *a = &nodes[path[j]];
				Node *l = childL[j] == -1 ? NULL : &nodes[path[childL[j]]];
				Node *r = childR[j] == -1 ? NULL : &nodes[path[childR[j]]];
				if((a->pathLeft = l) != NULL)
					l->parent = a;
				if((a->pathRight = r) != NULL)
					r->parent = a;
			}
		}
	}

	//weights is destroyed (overwritten in place with its prefix sums)
	int makeBiasedBinarySearchTree(vector<int> &weights, vector<int> &resL, vector<int> &resR) {
		int n = weights.size();
		weights.resize(n + 1);
		int sum = 0;
		for(int i = 0; i < n; ++ i) {
			int w = weights[i];
			weights[i] = sum;
			sum += w;
		}
		weights[n] = sum;
		resL.resize(n);
		resR.resize(n);
		return makeBiasedBinarySearchTreeRec(-1, 0, n, weights, resL, resR);
	}

	//This could be made O(n) overall by using a doubling (galloping) binary search from both
	//ends, so that each split costs log(size of the smaller side); that is not done here.
	//Even with this plain binary search the cost should be fine, since it is not called
	//with arbitrary weight distributions.
	int makeBiasedBinarySearchTreeRec(int p, int i, int j, const vector<int> &prefixSums, vector<int> &resL, vector<int> &resR) {
		if(i == j)
			return -1;
		//prefixSums[mid+1] - prefixSums[i] >= prefixSums[j] - prefixSums[mid]
		//prefixSums[mid] + prefixSums[mid+1] >= prefixSums[i] + prefixSums[j]
		int mid;
		if(i + 1 == j) {
			mid = i;
		}else {
			int t = prefixSums[i] + prefixSums[j];
			int l = i, u = j-1;
			while(u - l > 0) {
				int m = (l + u) / 2;
				if(prefixSums[m] + prefixSums[m+1] >= t)
					u = m;
				else
					l = m + 1;
			}
			mid = u;
		}
		assert(mid < j);
		resL[mid] = makeBiasedBinarySearchTreeRec(mid * 2 + 0, i, mid, prefixSums, resL, resR);
		resR[mid] = makeBiasedBinarySearchTreeRec(mid * 2 + 1, mid + 1, j, prefixSums, resL, resR);
		return mid;
	}

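	//Preorder of every path tree (each parent before its children); build() iterates it in
	//reverse so update() and subpathLeft/Right always see the children already finished.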
	void getGlobalOrder(int globalRoot) {
		globalOrder.clear();
		globalOrder.reserve(nodes.size());
		vector<const Node *> stk;
		int C = hld.lengths.size();
		for(int c = 0; c < C; ++ c) {
			stk.push_back(&nodes[pathRoots[hld.nodesBegin(c)[0]]]);
			while(!stk.empty()) {
				const Node *a = stk.back(); stk.pop_back();
				if(a == NULL) continue;
				globalOrder.push_back(getNodeIndex(a));
			
				stk.push_back(a->pathLeft);
				stk.push_back(a->pathRight);
			}
		}
		assert(globalOrder.size() == nodes.size());
	}

};

bool naivegetpath(int i, int p, int t, const vector<vi> &g, vi &path) {
	bool r = false;
	if(i == t) {
		r = true;
	}else {
		each(j, g[i]) if(*j != p)
			r = r || naivegetpath(*j, i, t, g, path);
	}
	if(r)
		path.push_back(i);
	return r;
}



#ifdef MY_LOCAL_RUN
#include "C:\Dropbox\backup\implements\Util\MyAssert.hpp"
#undef assert
#define assert my_assert
#define TEST
#endif

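//What main() handles: read N vertices with initial values S and coefficients C, N-1 edges and
//Q queries. A query "0 X Y Z" adds Z * C[v] to the value of every vertex v on the path X-Y;
//a query "1 X Y" prints the sum of the values on the path X-Y modulo 1e9+7 (the TEST branch
//checks both against the naive path walk).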
int main() {
	int N;
	for(int iii = 0; ; ++ iii) {
#ifndef TEST
		if(!~scanf("%d", &N)) break;
#else
		if(iii % 100 == 0) cerr << iii << "\r", cerr.flush();
		N=rand()%10+1;
#endif

		vector<int> S(N), C(N);
		rep(i, N) {
#ifndef TEST
			scanf("%d", &S[i]);
#else
			S[i]=rand()%100;
#endif
		}
		rep(i, N) {
#ifndef TEST
			scanf("%d", &C[i]);
#else
			C[i]=rand()%100;
#endif
		}
		vector<Val> initVals(N);
		rep(i, N)
			initVals[i] = Val(S[i], C[i]);
		vector<vi> g(N);
		rep(i, N-1) {
			int A, B;
#ifndef TEST
			scanf("%d%d", &A, &B), -- A, -- B;
#else
			A=i+1,B=rand()%(i+1);
#endif
			g[A].push_back(B);
			g[B].push_back(A);
		}
		BiasedHeavyLightDecompositionPathOnly bhld;
		bhld.build(g, 0, initVals);

#ifdef TEST
		vector<mint> naiveval(all(S));
#endif
		int Q;
#ifndef TEST
		scanf("%d", &Q);
#else
		Q=rand()%100+1;
#endif
		rep(ii, Q) {
			int ty;
#ifndef TEST
			scanf("%d", &ty);
#else
			ty=rand()%2;
#endif
			if(ty == 0) {
				int X, Y, Z;
#ifndef TEST
				scanf("%d%d%d", &X, &Y, &Z), -- X, -- Y;
#else
				X=rand()%N,Y=rand()%N,Z=rand()%100;
#endif

				bhld.addToPath(X, Y, PathAdd(Z));

#ifdef TEST
				vi naivepath; naivegetpath(X, -1, Y, g, naivepath);
				each(j, naivepath) naiveval[*j] += mint(Z) * C[*j];
#endif
			}else {
				int X, Y;
#ifndef TEST
				scanf("%d%d", &X, &Y), -- X, -- Y;
#else
				X=rand()%N,Y=rand()%N;
#endif
				mint ans = bhld.sumPath(X, Y);
#ifndef TEST
				printf("%d\n", ans.get());
#else

				vi naivepath; naivegetpath(X, -1, Y, g, naivepath);
				mint naivesum, naivecoefsum;
				each(j, naivepath) naivecoefsum += C[*j], naivesum += naiveval[*j];
				if(ans.get() != naivesum.get())
					cerr << ans.get() << " != " << naivesum.get() << endl;
#endif
			}
#ifdef TEST
//			rep(i, N) bhld.getVal(i);
//			rep(i, N) bhld.setVal(i, bhld.getVal(i));
			rep(i, N) assert(bhld.nodes[i].debugCheckUpdated());
//			rep(i, N) assert(bhld.getVal(i).val.x == naiveval[i].x);
#endif
		}
	}
	return 0;
}