// NOTE(review): this file arrived with every angle-bracket token stripped
// (HTML-extraction damage): all #include targets and all template parameter
// lists were missing. They are reconstructed below from how each name is used.
// The original had 13 includes; the exact list is unknowable, so the set below
// is the minimal one the code actually needs.
//
// Program: reads a tree, a value per vertex, then m queries (a, b, c); for each
// query it sums the vertex values on the path a..b (inclusive), multiplies by c,
// and accumulates; prints the grand total.
#include <cstdio>
#include <cmath>
#include <cassert>
#include <vector>
#include <map>
#include <functional>
#include <algorithm>
#define repeat(i,n) for (int i = 0; (i) < (n); ++(i))
#define repeat_from(i,m,n) for (int i = (m); (i) < (n); ++(i))
#define repeat_reverse(i,n) for (int i = (n)-1; (i) >= 0; --(i))
#define repeat_from_reverse(i,m,n) for (int i = (n)-1; (i) >= (m); --(i))
#define whole(f,x,...) ([&](decltype((x)) y) { return (f)(begin(y), end(y), ## __VA_ARGS__); })(x)
typedef long long ll;
using namespace std;
template <typename T> void setmax(T & a, T const & b) { if (a < b) a = b; }
template <typename T> void setmin(T & a, T const & b) { if (b < a) a = b; }
template <typename T, typename X> auto vectors(T a, X x) { return vector<T>(x, a); }
template <typename T, typename X, typename Y, typename... Zs> auto vectors(T a, X x, Y y, Zs... zs) { auto cont = vectors(a, y, zs...); return vector<decltype(cont)>(x, cont); }

// Heavy-light decomposition of a rooted tree: partitions the vertex set into
// vertex-disjoint "heavy" paths so any root-to-leaf walk crosses O(log |V|) paths.
struct heavy_light_decomposition_t {
    int n; // |V'|: number of heavy paths
    vector<int> a; // V ->> V': vertex -> index of the path containing it (surjective)
    vector<vector<int>> path; // V' -> V*: vertices of each path, bottom-to-top; codomains disjointly cover V
    vector<map<int, int>> pfind; // V' * V -> int: position of a vertex inside its path
    vector<int> parent; // V' -> V: the vertex just above the top of each path (-1 for the path containing the root)
    heavy_light_decomposition_t(int v, vector<vector<int>> const & g) {
        n = 0;
        a.resize(g.size());
        dfs(v, -1, g);
    }
    // Post-order DFS; returns the size of the subtree rooted at v.
    int dfs(int v, int p, vector<vector<int>> const & g) {
        int heavy_node = -1; // child with the largest subtree (the "heavy" child)
        int heavy_size = 0;
        int desc_size = 1;
        for (int w : g[v]) if (w != p) {
            int size = dfs(w, v, g);
            desc_size += size;
            if (heavy_size < size) {
                heavy_node = w;
                heavy_size = size;
            }
        }
        if (heavy_node == -1) {
            // Leaf: start a fresh heavy path at v.
            a[v] = n;
            n += 1;
            path.emplace_back();
            path.back().push_back(v);
            pfind.emplace_back();
            pfind.back()[v] = 0;
            parent.push_back(p);
        } else {
            // Internal vertex: extend the heavy child's path upward through v.
            int i = a[heavy_node];
            a[v] = i;
            pfind[i][v] = path[i].size();
            path[i].push_back(v);
            parent[i] = p;
        }
        return desc_size;
    }
};

// Lowest common ancestor via binary lifting (sparse table of 2^k-th ancestors).
struct lowest_common_ancestor_t {
    vector<vector<int>> a; // a[k][v] = 2^k-th ancestor of v, or -1 if it does not exist
    vector<int> depth; // depth[v] = distance from the root (root has depth 0)
    lowest_common_ancestor_t(int v, vector<vector<int>> const & g) {
        int n = g.size();
        int l = 1 + floor(log2(n)); // number of lifting levels
        a.resize(l);
        repeat (k,l) a[k].resize(n, -1);
        depth.resize(n);
        dfs(v, -1, 0, g, a[0], depth);
        // a[k+1][i] = 2^(k+1)-th ancestor = 2^k-th ancestor applied twice
        repeat (k,l-1) {
            repeat (i,n) {
                if (a[k][i] != -1) {
                    a[k+1][i] = a[k][a[k][i]];
                }
            }
        }
    }
    // Fills parent (immediate ancestor) and depth for every vertex.
    static void dfs(int v, int p, int current_depth, vector<vector<int>> const & g, vector<int> & parent, vector<int> & depth) {
        parent[v] = p;
        depth[v] = current_depth;
        for (int w : g[v]) if (w != p) {
            dfs(w, v, current_depth + 1, g, parent, depth);
        }
    }
    // Returns lca(x, y).
    int operator () (int x, int y) const {
        int l = a.size();
        if (depth[x] < depth[y]) swap(x,y);
        // Lift x up to y's depth.
        repeat_reverse (k,l) {
            if (a[k][x] != -1 and depth[a[k][x]] >= depth[y]) {
                x = a[k][x];
            }
        }
        assert (depth[x] == depth[y]);
        if (x == y) return x;
        // Lift both while their ancestors differ; they end one step below the LCA.
        repeat_reverse (k,l) {
            if (a[k][x] != a[k][y]) {
                x = a[k][x];
                y = a[k][y];
            }
        }
        assert (x != y);
        assert (a[0][x] == a[0][y]);
        return a[0][x];
    }
    // Given a strict ancestor x of y, returns the child of x on the path to y.
    int descendant_to (int x, int y) const {
        assert (depth[x] < depth[y]);
        int l = a.size();
        repeat_reverse (k,l) {
            if (a[k][y] != -1 and depth[a[k][y]] >= depth[x]+1) {
                y = a[k][y];
            }
        }
        assert (a[0][y] == x);
        return y;
    }
};

// Segment tree over a monoid (unit + associative append): point update, range fold.
template <typename T>
struct segment_tree { // on monoid
    int n; // number of leaves (power of two)
    vector<T> a; // 1-based implicit binary tree laid out in a[0 .. 2n-2]
    function<T (T, T)> append; // associative binary operation
    T unit; // identity element of the monoid
    template <typename F>
    segment_tree(int a_n, T a_unit, F a_append) {
        n = pow(2, ceil(log2(a_n))); // round size up to a power of two
        a.resize(2*n-1, a_unit);
        unit = a_unit;
        append = a_append;
    }
    // Sets leaf i to z and recomputes the ancestors.
    void point_update(int i, T z) {
        a[i+n-1] = z;
        for (i = (i+n)/2; i > 0; i /= 2) {
            a[i-1] = append(a[2*i-1], a[2*i]);
        }
    }
    // Folds the half-open range [l, r).
    T range_concat(int l, int r) {
        return range_concat(0, 0, n, l, r);
    }
    T range_concat(int i, int il, int ir, int l, int r) {
        if (l <= il and ir <= r) { // node fully inside the query
            return a[i];
        } else if (ir <= l or r <= il) { // node fully outside the query
            return unit;
        } else {
            return append(
                    range_concat(2*i+1, il, (il+ir)/2, l, r),
                    range_concat(2*i+2, (il+ir)/2, ir, l, r));
        }
    }
};

// Folds the values on the upward path v -> w, where w is an ancestor of v
// (or v itself), hopping from heavy path to heavy path.
template <typename T>
T path_concat(heavy_light_decomposition_t & hl, vector<segment_tree<T>> & sts, int v, int w) {
    auto append = sts.front().append;
    auto unit = sts.front().unit;
    T acc = unit;
    int i = hl.a[v];
    if (hl.a[w] == i) {
        // v and w share a heavy path: one range query finishes the job.
        assert (hl.pfind[i][v] <= hl.pfind[i][w]); // v must be a descendant of w
        acc = append(acc, sts[i].range_concat(hl.pfind[i][v], hl.pfind[i][w]+1));
    } else {
        // Consume v's whole path segment, then continue from the vertex above it.
        acc = append(acc, sts[i].range_concat(hl.pfind[i][v], hl.path[i].size()));
        acc = append(acc, path_concat(hl, sts, hl.parent[i], w));
    }
    return acc;
}

// Folds the values on the (unordered) path x .. y via their LCA z:
// both legs exclude z; z itself is appended exactly once at the end.
template <typename T>
T path_concat(heavy_light_decomposition_t & hl, lowest_common_ancestor_t & lca, vector<segment_tree<T>> & sts, int x, int y) {
    auto append = sts.front().append;
    auto unit = sts.front().unit;
    int z = lca(x, y);
    T acc = unit;
    if (x != z) acc = append(acc, path_concat(hl, sts, x, lca.descendant_to(z, x)));
    if (y != z) acc = append(acc, path_concat(hl, sts, y, lca.descendant_to(z, y)));
    acc = append(acc, path_concat(hl, sts, z, z));
    return acc;
}

int main() {
    // input: n vertices, n-1 edges (0-indexed endpoints), n vertex values
    int n; scanf("%d", &n);
    vector<vector<int>> g(n);
    repeat (i,n-1) {
        int a, b; scanf("%d%d", &a, &b);
        g[a].push_back(b);
        g[b].push_back(a);
    }
    vector<int> u(n);
    repeat (i,n) scanf("%d", &u[i]);
    // prepare: one sum-segment-tree per heavy path, loaded with vertex values
    const int root = 0;
    heavy_light_decomposition_t hl(root, g);
    vector<segment_tree<ll>> sts;
    repeat (i,hl.n) {
        sts.emplace_back(hl.path[i].size(), 0, [](ll a, ll b) { return a + b; });
    }
    repeat (i,n) {
        int l = hl.a[i];
        sts[l].point_update(hl.pfind[l][i], u[i]);
    }
    lowest_common_ancestor_t lca(root, g);
    // run: m queries (a, b, c) -> accumulate c * (sum of values on path a..b)
    ll ans = 0;
    int m; scanf("%d", &m);
    repeat (i,m) {
        int a, b, c; scanf("%d%d%d", &a, &b, &c);
        ans += path_concat(hl, lca, sts, a, b) * c;
    }
    // output
    printf("%lld\n", ans);
    return 0;
}