#include <iostream>
#include <vector>
#include <map>
#include <set>
#include <numeric>
#include <algorithm>
#include <utility>

// Using standard namespace for convenience
using namespace std;

// Define long long for larger integer types
typedef long long ll;

// Structure to represent an edge with its properties
struct Edge {
    int u, v;   // Endpoints of the edge (1-based indexing)
    ll C;       // Congestion level
    int id;     // Original index of the edge (0 to N-2)
};

// Global vector to store all edges of the tree
vector<Edge> edges;

// Disjoint Set Union (DSU) structure using vectors
vector<int> parent; // parent[i] stores the parent of node i
vector<int> sz;     // sz[i] stores the size of the set rooted at i

// Initialize DSU structure for N nodes (1 to N).
// The nodes are numbered 1 to N. We use vector size N+1 for 1-based indexing.
void init_dsu(int N) {
    parent.resize(N + 1);
    // Initially, each node is its own parent
    iota(parent.begin(), parent.end(), 0);
    // Initially, each set has size 1
    sz.assign(N + 1, 1);
}

// Find the representative (root) of the set containing node v.
// Uses path compression for optimization.
int find_set(int v) {
    // If v is the root, return v
    if (v == parent[v]) return v;
    // Otherwise, find the root and update parent[v] to point directly to the root (path compression)
    return parent[v] = find_set(parent[v]);
}

// Unite the sets containing nodes a and b.
// Uses union by size for optimization.
void unite_sets(int a, int b) {
    // Find representatives of the sets containing a and b
    a = find_set(a);
    b = find_set(b);
    // If they are already in the same set, do nothing
    if (a != b) {
        // Union by size: attach the smaller tree to the root of the larger tree
        if (sz[a] < sz[b]) swap(a, b); // Ensure a is the root of the larger tree
        parent[b] = a;                 // Make a the parent of b's root
        sz[a] += sz[b];                // Update the size of the merged set rooted at a
    }
}

// Function to check if a maximum congestion level X is achievable.
// This function implements the logic derived in the thought process.
bool check(ll X, int N) {
    // Store indices of edges with congestion greater than X. These are the "critical" edges.
    vector<int> edges_gt_X_ids;
    // Track the maximum congestion among these critical edges. This determines the reduction amount.
    ll max_C_on_path = 0;

    // Data structures to represent the subgraph formed by critical edges:
    map<int, vector<pair<int, int>>> adj_subgraph; // Adjacency list: node -> {neighbor, edge_id}
    map<int, int> degree_subgraph;                 // Degree of each node within this subgraph
    set<int> involved_nodes_set;                   // Set of unique nodes incident to any critical edge

    // Iterate through all edges to identify critical ones and build the subgraph representation
    for (int i = 0; i < N - 1; ++i) {
        if (edges[i].C > X) { // If edge congestion exceeds target X
            edges_gt_X_ids.push_back(i); // Add edge index to list of critical edges
            max_C_on_path = max(max_C_on_path, edges[i].C); // Update max congestion on the critical path candidate
            int u = edges[i].u, v = edges[i].v;
            // Add edge to subgraph adjacency list for both endpoints
            adj_subgraph[u].push_back({v, i});
            adj_subgraph[v].push_back({u, i});
            // Increment degrees for both endpoints in the subgraph
            degree_subgraph[u]++;
            degree_subgraph[v]++;
            // Add nodes to the set of involved nodes (set handles uniqueness automatically)
            involved_nodes_set.insert(u);
            involved_nodes_set.insert(v);
        }
    }

    // If there are no critical edges, the current maximum congestion is already <= X.
    // Thus, X is achievable (e.g., by building no subway, or a subway with p = q).
    if (edges_gt_X_ids.empty()) {
        return true;
    }

    // Check if the critical edges form a simple path structure.
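    // Illustrative example (hypothetical edges, not taken from any particular input): critical
    // edges (1-2), (2-3), (3-4) form the simple path 1-2-3-4 and pass the checks below, whereas
    // (1-2), (1-3), (1-4) give node 1 degree 3, so they cannot lie on one path and X is rejected.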
    // A simple path has all nodes with degree at most 2, and exactly 2 nodes with degree 1 (the endpoints).
    int start_node = -1; // Potential endpoint 1
    int end_node = -1;   // Potential endpoint 2
    int deg1_count = 0;  // Counter for nodes with degree 1

    // Iterate through all nodes involved in the critical subgraph
    for (int node : involved_nodes_set) {
        // Get the degree of the node in the subgraph. If the node is not in the map, the degree is 0
        // (this should not happen, since every node in involved_nodes_set comes from an edge).
        int deg = degree_subgraph.count(node) ? degree_subgraph[node] : 0;
        // Check the degree property for a path: must be <= 2
        if (deg > 2) {
            return false; // Found a node with degree > 2, cannot be a simple path
        }
        // Identify nodes with degree 1, which must be the endpoints of the path
        if (deg == 1) {
            deg1_count++;
            if (start_node == -1) start_node = node; // Assign first degree-1 node found to start_node
            else end_node = node;                    // Assign second degree-1 node found to end_node
        }
    }

    // A non-empty path graph must have exactly 2 nodes with degree 1.
    if (deg1_count != 2) {
        // If not exactly 2 nodes have degree 1, it's not a single simple path.
        // Could be disconnected, a cycle (all deg 2, deg1_count = 0), or empty (handled earlier).
        return false;
    }

    // Verify path connectivity and structure using Breadth-First Search (BFS).
    // Start BFS from one potential endpoint (start_node).
    map<int, bool> visited_nodes_bfs; // Track visited nodes during BFS to avoid cycles and redundant work
    vector<int> q;                    // Queue for BFS traversal
    q.push_back(start_node);
    visited_nodes_bfs[start_node] = true;
    size_t head = 0;         // Index simulating queue front removal
    int edges_bfs_count = 0; // Count edges traversed in BFS, forming the BFS tree

    while (head < q.size()) {
        int u = q[head++]; // Dequeue node u
        // Check if node u exists in the subgraph adjacency list (should always be true if BFS reached it)
        if (adj_subgraph.count(u)) {
            // Explore neighbors v of u in the subgraph
            for (auto& edge_info : adj_subgraph[u]) {
                int v = edge_info.first; // Neighbor node
                // Check if neighbor v has been visited using map lookup. If not found, it's not visited.
                if (visited_nodes_bfs.find(v) == visited_nodes_bfs.end()) {
                    visited_nodes_bfs[v] = true; // Mark v as visited
                    q.push_back(v);              // Enqueue v
                    // Increment count of edges explored. This counts edges in the BFS tree.
                    // For a path graph, the BFS tree covers all edges exactly once.
                    edges_bfs_count++;
                }
            }
        }
    }

    // Final checks after BFS completion:
    // 1. The number of edges explored (`edges_bfs_count`) must equal the total number of critical edges
    //    (`edges_gt_X_ids.size()`). Fewer explored edges implies disconnected components; more would
    //    imply a cycle, which is impossible in a subgraph of a tree.
    // 2. The designated end node (`end_node`) must have been visited by the BFS starting from `start_node`.
    if (edges_bfs_count != (int)edges_gt_X_ids.size() ||
        visited_nodes_bfs.find(end_node) == visited_nodes_bfs.end()) {
        // If either condition fails, the critical edges do not form a single connected path.
        return false;
    }

    // If the critical edges form a valid path P*, proceed to check the core condition.
    // The condition is that there must exist a path P_{p,q} containing P* such that
    //   min_{e' in P_{p,q}} C_{e'} >= max_{e in P*} C_e - X.
    // Let K = max_{e in P*} C_e - X. We need min_{e' in P_{p,q}} C_{e'} >= K.
    ll K = max_C_on_path - X;

    // If K <= 0, the required minimum congestion is non-positive.
    // Since all initial congestions C_i are >= 1, any edge satisfies C_i >= K.
    // Therefore, the path P* itself (by choosing p = start_node, q = end_node) works.
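    // Worked example (illustrative numbers only): if the largest congestion on P* is 10 and the
    // target is X = 7, then K = 3, so every edge of the chosen path P_{p,q} must have congestion
    // at least 3. If instead X >= 10, then K <= 0 and the requirement below is vacuous.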
    // In that case the condition is trivially satisfied.
    if (K <= 0) {
        return true;
    }

    // If K > 0, we have two sub-conditions:
    // 1. All edges on the critical path P* must themselves satisfy C_e >= K,
    //    because P* must be part of the chosen path P_{p,q}.
    for (int edge_id : edges_gt_X_ids) {
        if (edges[edge_id].C < K) {
            // If any edge on P* has congestion less than K, it's impossible to satisfy the condition.
            return false;
        }
    }

    // 2. The path P* must be extendable (or is itself sufficient) into a path P_{p,q} using only
    //    edges with C >= K. This is equivalent to checking whether the endpoints of P*
    //    (start_node, end_node) are connected in the graph G_{>=K}, which contains only edges
    //    of the original tree with congestion >= K.
    //    We use DSU to check this connectivity efficiently.
    init_dsu(N); // Initialize DSU structure for N nodes
    // Consider all edges in the original tree
    for (int i = 0; i < N - 1; ++i) {
        // If an edge's congestion meets the minimum requirement K
        if (edges[i].C >= K) {
            // Unite the sets containing its endpoints in the DSU structure
            unite_sets(edges[i].u, edges[i].v);
        }
    }
    // Check if start_node and end_node are in the same set (connected) in G_{>=K}
    return find_set(start_node) == find_set(end_node);
}

int main() {
    // Optimize C++ standard streams for faster I/O operations
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);

    int N; // Number of intersections
    cin >> N;
    edges.resize(N - 1); // Resize edge vector to hold N-1 edges

    ll max_C = 0; // Variable to track the maximum initial congestion level
    // Read edge data for the N-1 roads
    for (int i = 0; i < N - 1; ++i) {
        int u, v; // Endpoints of the road
        ll C;     // Congestion level
        cin >> u >> v >> C;
        // Store edge properties. Using 1-based indexing for nodes as given; the edge ID is 0-based.
        edges[i] = {u, v, C, i};
        // Update the maximum congestion found so far
        max_C = max(max_C, C);
    }

    // Binary search for the minimum achievable maximum congestion (the answer D).
    // The answer must lie between 0 and the initial maximum congestion `max_C`.
    ll low = 0, high = max_C, ans = max_C; // Initialize the search range and the best answer found so far
    while (low <= high) {
        // Calculate the midpoint carefully to avoid overflow
        ll mid = low + (high - low) / 2;
        // Check if a maximum congestion of `mid` is achievable
        if (check(mid, N)) {
            // If yes, `mid` is a potential answer. Store it and try for an even smaller value.
            ans = mid;
            high = mid - 1; // Adjust search range to [low, mid-1]
        } else {
            // If no, `mid` is too small. A larger maximum congestion is needed.
            low = mid + 1; // Adjust search range to [mid+1, high]
        }
    }

    // Output the minimum achievable maximum congestion found by the binary search
    cout << ans << endl;
    return 0;
}
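/*
 * Usage sketch (assumptions: a typical single-test-case judge setup; the file name
 * "subway.cpp" and the compile flags are examples only, not part of the original solution):
 *
 *   g++ -O2 -std=c++17 -o subway subway.cpp
 *   ./subway < input.txt
 *
 * Input format, as read by main():
 *   N                 number of intersections
 *   u_i v_i C_i       the N-1 roads, one per line (1-based endpoints, congestion C_i)
 *
 * Complexity: each call to check() is O(N log N) due to the map/set operations, and the binary
 * search makes O(log max_C) calls, for roughly O(N log N log max_C) overall.
 */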