Result

Problem: No.1038 TreeAddQuery
User: qwewe
Submission time: 2025-05-14 12:59:49
Language: C++17 (gcc 13.3.0 + boost 1.87.0)
Result: TLE
Execution time: -
Code length: 17,815 bytes
Compile time: 1,629 ms
Compile memory: 118,896 KB
Runtime memory: 87,868 KB
Final judge time: 2025-05-14 13:01:23
Total judge time: 22,708 ms
Judge server ID (reference): judge2 / judge3
File pattern: Result
sample: AC × 3
other: AC × 10, TLE × 1, -- × 13

Source code


#include <iostream>
#include <vector>
#include <algorithm> // sort, unique, upper_bound, fill, swap
#include <map>

using namespace std;

// Use long long for sums and update values to avoid overflow
typedef long long ll;

// Define maximum number of nodes and logarithm of N for LCA sparse table
const int MAXN = 100005; 
// LOGN is derived from MAXN: log2(100005) is approximately 16.6, so 17 levels
// suffice; 18 provides a safe buffer.
const int LOGN = 18;

// Adjacency list representation of the tree
vector<int> adj[MAXN];
int N, Q; // Number of nodes and queries

// LCA related variables
int depth[MAXN];          // depth[i] stores the depth of node i (distance from root)
int parent[LOGN][MAXN];   // parent[k][i] stores the 2^k-th ancestor of node i
int subtree_size[MAXN];   // Used transiently for centroid decomposition subtree size calculation

/**
 * @brief Performs Depth First Search to compute depths and immediate parents for LCA.
 * @param u Current node.
 * @param p Parent of current node.
 * @param d Depth of current node.
 */
void dfs_lca(int u, int p, int d) {
    depth[u] = d;
    parent[0][u] = p; // The 0-th ancestor (2^0 = 1) is the immediate parent
    for (int v : adj[u]) {
        if (v == p) continue; // Skip the parent node
        dfs_lca(v, u, d + 1);
    }
}

/**
 * @brief Builds the sparse table for fast LCA queries.
 * Precomputes 2^k-th ancestors for all nodes.
 */
void build_lca() {
    // Perform initial DFS to get depths and immediate parents (parent[0][*])
    dfs_lca(1, 0, 0); // Assuming node 1 is the root (arbitrary choice for tree)
    
    // Compute 2^k-th ancestors using dynamic programming
    for (int k = 1; k < LOGN; ++k) {
        for (int i = 1; i <= N; ++i) {
            // If the 2^(k-1)-th ancestor exists
            if (parent[k - 1][i] != 0) {
                // The 2^k-th ancestor is the 2^(k-1)-th ancestor of the 2^(k-1)-th ancestor
                parent[k][i] = parent[k - 1][parent[k - 1][i]];
            } else {
                // If 2^(k-1)-th ancestor doesn't exist, then 2^k-th ancestor also doesn't exist
                parent[k][i] = 0; 
            }
        }
    }
}
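
/*
 * Worked example (a sketch under an assumed input, not part of the solution):
 * for the path 1-2-3-4-5 rooted at 1, parent[0][5] = 4, so
 * parent[1][5] = parent[0][parent[0][5]] = parent[0][4] = 3 (2 steps up) and
 * parent[2][5] = parent[1][parent[1][5]] = parent[1][3] = 1 (4 steps up).
 */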

/**
 * @brief Computes the Lowest Common Ancestor (LCA) of two nodes u and v.
 * @param u First node.
 * @param v Second node.
 * @return The LCA node of u and v.
 */
int get_lca(int u, int v) {
    // Ensure node u is deeper or at the same level as node v
    if (depth[u] < depth[v]) swap(u, v);
    
    // Lift node u up to the same depth as node v using binary lifting
    for (int k = LOGN - 1; k >= 0; --k) {
        // Check if jumping 2^k steps up keeps u at least as deep as v
        if (depth[u] - (1 << k) >= depth[v]) {
            u = parent[k][u]; // Jump up
        }
    }

    // If u and v are now the same node, it's the LCA
    if (u == v) return u;

    // Lift u and v up together maintaining the property that their parents are different
    for (int k = LOGN - 1; k >= 0; --k) {
        // If their 2^k-th ancestors are different, jump both u and v up by 2^k
        if (parent[k][u] != parent[k][v]) {
            u = parent[k][u];
            v = parent[k][v];
        }
    }
    // After the loop, u and v are children of the LCA. Return the parent of u (or v).
    return parent[0][u];
}
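
/*
 * Illustrative trace (assumed toy input): with edges 1-2, 2-3, 1-4 and root 1,
 * get_lca(3, 4) first lifts node 3 from depth 2 to depth 1, reaching node 2;
 * since 2 != 4 but parent[k][2] == parent[k][4] for every k, neither node
 * moves in the second loop, and parent[0][2] == 1 is returned as the LCA.
 */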

/**
 * @brief Calculates the shortest distance between two nodes u and v in the tree.
 * @param u First node.
 * @param v Second node.
 * @return The shortest distance between u and v.
 */
int get_dist(int u, int v) {
    // Nodes are 1-indexed; 0 marks the root's parent / an invalid node.
    if (u == 0 || v == 0) return N; // N exceeds any real tree distance (at most N-1)
    int lca = get_lca(u, v);
    // Distance formula: dist(u, v) = depth(u) + depth(v) - 2 * depth(lca)
    return depth[u] + depth[v] - 2 * depth[lca];
}

// Fenwick tree (BIT) implementation supporting Range Update Point Query (RUPQ) model
struct FenwickTree {
    vector<ll> bit; // The BIT array, stores sums/updates
    int size;       // The size of the conceptual array being represented (0 to size-1)

    FenwickTree() : size(0) {} // Default constructor
    // Constructor initializing BIT with a given size
    FenwickTree(int sz) : size(sz) { bit.assign(sz + 1, 0); } // BIT array uses 1-based indexing internally, size sz+1
    
    // Method to re-initialize/assign size and reset BIT
    void assign(int sz) { size = sz; bit.assign(sz + 1, 0); }

    /**
     * @brief Performs a point update operation required for the RUPQ model.
     * Adds `delta` to the element at index `idx`. Used to implement range updates:
     * range update [L, R] by `val` is done via point_update(L, val) and point_update(R+1, -val).
     * @param idx 0-based index in the conceptual array.
     * @param delta Value to add.
     */
    void point_update(int idx, ll delta) {
        // Updates outside the valid 0-based range [0, size) are silently ignored
        if (idx < 0 || idx >= size) return;
        
        idx++; // Convert to 1-based index for BIT operations
        // Propagate the update through the BIT structure
        for (; idx <= size; idx += idx & -idx) { bit[idx] += delta; }
    }

    /**
     * @brief Performs a range query operation for the RUPQ model.
     * Returns the actual value at index `idx`. This is computed as the prefix sum up to `idx` in the BIT.
     * @param idx 0-based index in the conceptual array.
     * @return The value at index idx.
     */
    ll range_query(int idx) {
        // Check bounds for the 0-based index
        if (idx < 0) return 0;      // Query before start index returns 0
        if (idx >= size) return 0;  // Query past end index returns 0

        idx++; // Convert 0-based idx to 1-based index for BIT operations
        ll sum = 0;
        // Sum up values from relevant BIT nodes
        for (; idx > 0; idx -= idx & -idx) { sum += bit[idx]; }
        return sum;
    }
};
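
/*
 * Usage sketch for the RUPQ model (illustrative only; nothing below calls it):
 *
 *   FenwickTree ft(10);
 *   ft.point_update(2, 5);     // +5 at the start of the range [2, 6]
 *   ft.point_update(7, -5);    // -5 one past the end cancels the update
 *   ll a = ft.range_query(4);  // a == 5: index 4 lies inside [2, 6]
 *   ll b = ft.range_query(8);  // b == 0: the +5 and -5 deltas cancel
 */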

// Centroid Decomposition related variables
bool centroid_removed[MAXN]; // Flags indicating if a node has been removed as a centroid
int centroid_parent[MAXN];   // Stores the parent of each node in the centroid tree
vector<int> dists1[MAXN], dists2[MAXN]; // Stores unique sorted distances for FT1 and FT2 per centroid
map<int, int> dist_map1[MAXN], dist_map2[MAXN]; // Maps distances to their compressed indices for FTs
FenwickTree ft1[MAXN], ft2[MAXN]; // Two Fenwick trees per centroid node
vector<int> component_nodes; // Temporary storage for nodes in the current component during decomposition

/**
 * @brief DFS used within centroid decomposition to find subtree sizes in the current active component.
 * @param u Current node.
 * @param p Parent node in the DFS traversal.
 */
void find_subtree_sizes_cd(int u, int p) {
    subtree_size[u] = 1; // Initialize size to 1 (the node itself)
    component_nodes.push_back(u); // Add current node to the list for this component
    for (int v : adj[u]) {
        // Explore neighbor if it's not the parent and not already removed as a centroid
        if (v == p || centroid_removed[v]) continue; 
        find_subtree_sizes_cd(v, u);
        subtree_size[u] += subtree_size[v]; // Accumulate sizes from children
    }
}

/**
 * @brief Finds the centroid of the component rooted at `u` with total size `total_size`.
 * A centroid is a node whose removal splits the component into subtrees each of size at most total_size / 2.
 * @param u Current node.
 * @param p Parent node in the DFS traversal.
 * @param total_size Total number of nodes in the current component.
 * @return The centroid node.
 */
int find_centroid(int u, int p, int total_size) {
    for (int v : adj[u]) {
        if (v == p || centroid_removed[v]) continue;
        // If any child subtree is more than half the total size, the centroid must be in that subtree
        if (subtree_size[v] * 2 > total_size) {
            return find_centroid(v, u, total_size); // Recurse into the larger subtree
        }
    }
    // If no child's subtree is > half size, the current node `u` is the centroid
    return u;
}
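
/*
 * Example for intuition (assumed input): on the path 1-2-3-4-5 with
 * total_size 5, node 3 is the centroid; removing it leaves two components of
 * size 2 each, and 2 * 2 > 5 is false, so neither recursion condition fires.
 */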

/**
 * @brief Recursively performs centroid decomposition on the tree/subtree rooted at `u`.
 * @param u The starting node for decomposition in the current component.
 * @param p_centroid The centroid parent of the component being decomposed. 0 if it's the root component.
 * @return The centroid found for this component.
 */
int decompose(int u, int p_centroid) {
    component_nodes.clear(); // Reset the list of nodes for the current component
    find_subtree_sizes_cd(u, 0); // Compute subtree sizes and collect nodes for the component
    int centroid = find_centroid(u, 0, component_nodes.size()); // Find the centroid
    
    centroid_removed[centroid] = true; // Mark the centroid as removed for subsequent steps
    centroid_parent[centroid] = p_centroid; // Set its parent in the centroid tree

    // Collect distances dist(centroid, v) for all nodes v in the current component using BFS
    vector<int> current_dists1; // To store distances for FT1
    map<int, int> node_dist_from_c; // Map: node -> distance from centroid
    vector<pair<int,int>> q; // Queue for BFS traversal
    q.push_back({centroid, 0});
    node_dist_from_c[centroid] = 0;

    int head = 0; // BFS queue index
    while (head < (int)q.size()) {
        pair<int,int> curr = q[head++];
        int curr_node = curr.first;
        int curr_d = curr.second;
        current_dists1.push_back(curr_d); // Add distance to list

        // Explore neighbors
        for (int neighbor : adj[curr_node]) {
            // Explore a neighbor only if it is not removed and not yet visited in this BFS
            if (!centroid_removed[neighbor] && node_dist_from_c.find(neighbor) == node_dist_from_c.end()) {
                node_dist_from_c[neighbor] = curr_d + 1;
                q.push_back({neighbor, curr_d + 1});
            }
        }
    }
    
    // Prepare FT1 for the centroid: Sort unique distances, map them to indices, initialize FT
    sort(current_dists1.begin(), current_dists1.end());
    current_dists1.erase(unique(current_dists1.begin(), current_dists1.end()), current_dists1.end());
    dists1[centroid] = current_dists1; // Store unique sorted distances
    for (int i = 0; i < (int)dists1[centroid].size(); ++i) dist_map1[centroid][dists1[centroid][i]] = i; // Map distance -> compressed index
    ft1[centroid].assign(dists1[centroid].size()); // Initialize FT1 with correct size

    // Collect distances dist(p_centroid, v) for nodes v in component, needed for FT2
    if (p_centroid != 0) { // Only if parent centroid exists
        vector<int> current_dists2; // To store distances for FT2
        for (auto const& entry : node_dist_from_c) { // Iterate over the nodes found in this component
            current_dists2.push_back(get_dist(p_centroid, entry.first)); // Distance computed via LCA
        }
        // Prepare FT2 for the centroid
        sort(current_dists2.begin(), current_dists2.end());
        current_dists2.erase(unique(current_dists2.begin(), current_dists2.end()), current_dists2.end());
        dists2[centroid] = current_dists2; // Store unique sorted distances
        for (int i = 0; i < (int)dists2[centroid].size(); ++i) dist_map2[centroid][dists2[centroid][i]] = i; // Map distance -> compressed index
        ft2[centroid].assign(dists2[centroid].size()); // Initialize FT2 with correct size
    }

    // Recursively decompose the remaining subtrees attached to the centroid
    for (int v : adj[centroid]) {
        if (!centroid_removed[v]) { // If neighbor is not already removed
            decompose(v, centroid); // Recurse, passing current centroid as parent
        }
    }
    return centroid; // Return the found centroid (unused by the caller)
}
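
/*
 * On the same path 1-2-3-4-5 (assumed input), decompose(1, 0) makes node 3
 * the centroid-tree root with children 2 and 4, which in turn parent 1 and 5.
 * Each level at least halves the component, so the centroid tree has depth
 * O(log N); that depth bounds the while-loops in update() and query() below.
 */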

/**
 * @brief Helper function to find the compressed index for the largest distance <= max_dist.
 * Uses binary search on the sorted unique distance list.
 * @param unique_dists Sorted vector of unique distances.
 * @param max_dist The maximum distance threshold.
 * @return The compressed index (0-based) or -1 if no such distance exists.
 */
int get_compressed_idx_upper(const vector<int>& unique_dists, int max_dist) {
    if (unique_dists.empty()) return -1; // Handle empty distance list case
    // Find the first element strictly greater than max_dist
    auto it = upper_bound(unique_dists.begin(), unique_dists.end(), max_dist);
    // If iterator is at the beginning, all elements are > max_dist
    if (it == unique_dists.begin()) return -1; 
    // Otherwise, the element before `it` is the largest one <= max_dist. Return its index.
    return (it - 1) - unique_dists.begin(); // Calculate 0-based index
}
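
/*
 * Example (hypothetical values): with unique_dists = {0, 2, 5} and
 * max_dist = 4, upper_bound lands on 5, so index 1 (the entry 2, the largest
 * distance <= 4) is returned; with max_dist = -1 every entry is too large and
 * the function returns -1.
 */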

/**
 * @brief Processes an update query (X, Y, Z). Adds Z to weights of nodes within distance Y of X.
 * Traverses up the centroid tree from X, updating relevant Fenwick trees.
 * @param X Center node for the update.
 * @param Y Maximum distance from X.
 * @param Z Value to add to weights.
 */
void update(int X, int Y, ll Z) {
    int curr = X; // Start from query node X
    while (curr != 0) { // Traverse up the centroid tree until root's parent (0)
        int dist_X_curr = get_dist(X, curr); // Distance from update center X to current centroid `curr`
        int Y_prime = Y - dist_X_curr; // Effective radius relative to `curr`
        
        // If effective radius is non-negative, this centroid path contributes to the update
        if (Y_prime >= 0) {
            // Update FT1[curr]: affects nodes v based on dist(curr, v) <= Y_prime
            if (!dists1[curr].empty()) {
                 // Find index corresponding to max distance <= Y_prime
                 int max_idx = get_compressed_idx_upper(dists1[curr], Y_prime);
                 if (max_idx != -1) { // If such a distance exists
                     // Apply range update [0, max_idx] on FT1[curr]
                     ft1[curr].point_update(0, Z); // Add Z at start of range
                     ft1[curr].point_update(max_idx + 1, -Z); // Subtract Z after end of range
                 }
            }
        }

        int p = centroid_parent[curr]; // Get parent centroid
        if (p != 0) { // If parent exists
             // Update FT2[curr]: cancels effect from parent p's update for nodes in `curr`'s subtree
             // Cancellation based on dist(p, v) <= Y - dist(X, p)
             int Y_prime_cancel = Y - get_dist(X, p); 
             if (Y_prime_cancel >= 0) {
                if (!dists2[curr].empty()) { // Check if FT2 exists and is initialized
                     // Find index for max distance dist(p,v) <= Y_prime_cancel
                     int max_idx = get_compressed_idx_upper(dists2[curr], Y_prime_cancel);
                     if (max_idx != -1) {
                         // Apply range update [0, max_idx] with -Z on FT2[curr]
                         ft2[curr].point_update(0, -Z); // Add -Z at start
                         ft2[curr].point_update(max_idx + 1, Z); // Add Z after end to cancel -Z
                     }
                 }
             }
        }
        curr = p; // Move up to the parent centroid
    }
}
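
/*
 * Worked example of the cancellation (hypothetical numbers): with Y = 4,
 * dist(X, curr) = 1 and dist(X, p) = 3, ft1[curr] gets a +Z range update over
 * dist(curr, v) <= 3, while ft2[curr] gets a -Z range update over
 * dist(p, v) <= 1, cancelling exactly the nodes of curr's component that the
 * iteration at p will count again via ft1[p].
 */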

/**
 * @brief Processes a query for the current weight of vertex V.
 * Traverses up the centroid tree from V, summing contributions from relevant Fenwick trees.
 * @param V The node to query.
 * @return The current weight of node V.
 */
ll query(int V) {
    ll total_weight = 0; // Initialize total weight
    int curr = V; // Start from query node V
    while (curr != 0) { // Traverse up the centroid tree
        // Add contribution from FT1[curr], based on distance dist(V, curr)
        int dist_V_curr = get_dist(V, curr);
        // Check if this distance exists in the compressed map for FT1
        if (dist_map1[curr].count(dist_V_curr)) { 
             int idx = dist_map1[curr][dist_V_curr]; // Get compressed index
             total_weight += ft1[curr].range_query(idx); // Query FT1 value at this index
        }

        // Add contribution from FT2[curr] (correction part), based on distance dist(V, p)
        int p = centroid_parent[curr]; // Get parent centroid
        if (p != 0) { // If parent exists
            int dist_V_p = get_dist(V, p); // Distance from V to parent centroid p
             // Check if this distance exists in the compressed map for FT2
             if (dist_map2[curr].count(dist_V_p)) { 
                 int idx = dist_map2[curr][dist_V_p]; // Get compressed index
                 total_weight += ft2[curr].range_query(idx); // Query FT2 value at this index
             }
        }
        curr = p; // Move up to the parent centroid
    }
    return total_weight;
}
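
/*
 * Sketch of the accumulated sum (hypothetical centroid path): if V's
 * centroid-tree ancestors are c0 = V, c1, c2 (root), then
 *   total = ft1[c0](d(V,c0)) + ft2[c0](d(V,c1))
 *         + ft1[c1](d(V,c1)) + ft2[c1](d(V,c2))
 *         + ft1[c2](d(V,c2))
 * where ft(d) denotes a point query at the compressed index of distance d
 * and d(.,.) is get_dist.
 */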

// Main function
int main() {
    // Optimize standard I/O operations
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);

    // Read number of nodes N and number of queries Q
    cin >> N >> Q;
    // Read N-1 edges to build the tree adjacency list
    for (int i = 0; i < N - 1; ++i) {
        int u, v;
        cin >> u >> v;
        adj[u].push_back(v);
        adj[v].push_back(u);
    }

    // Preprocessing steps
    build_lca(); // Build LCA sparse table
    fill(centroid_removed + 1, centroid_removed + N + 1, false); // Initialize centroid removed flags
    decompose(1, 0); // Perform centroid decomposition starting from node 1

    // Process Q queries
    for (int i = 0; i < Q; ++i) {
        int X, Y; // Query parameters: center node X, distance Y
        ll Z;     // Update value Z
        cin >> X >> Y >> Z;
        cout << query(X) << "\n"; // First, output the current weight of node X
        update(X, Y, Z);         // Then, apply the update operation
    }

    return 0; // Successful execution
}