Result
Problem | No.1326 ふたりのDominator
User |
Submission time | 2024-12-31 15:06:10
Language | C++23 (gcc 13.3.0 + boost 1.87.0)
Result | AC
Execution time | 173 ms / 2,000 ms
Code length | 17,553 bytes
Compile time | 3,648 ms
Compile memory usage | 213,040 KB
Runtime memory usage | 24,456 KB
Last judged at | 2024-12-31 15:06:19
Total judge time | 8,625 ms
Judge server ID (for reference) | judge3 / judge5
File pattern | Result
---|---
sample | AC * 1 |
other | AC * 24 |
Source code
#include <algorithm>
#include <array>
#include <bitset>
#include <cassert>
#include <chrono>
#include <cmath>
#include <complex>
#include <deque>
#include <forward_list>
#include <fstream>
#include <functional>
#include <iomanip>
#include <ios>
#include <iostream>
#include <limits>
#include <list>
#include <map>
#include <memory>
#include <numeric>
#include <optional>
#include <queue>
#include <random>
#include <set>
#include <sstream>
#include <stack>
#include <string>
#include <tuple>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
using namespace std;
using lint = long long;
using pint = pair<int, int>;
using plint = pair<lint, lint>;
struct fast_ios { fast_ios(){ cin.tie(nullptr), ios::sync_with_stdio(false), cout << fixed << setprecision(20); }; } fast_ios_;
#define ALL(x) (x).begin(), (x).end()
#define FOR(i, begin, end) for(int i=(begin),i##_end_=(end);i<i##_end_;i++)
#define IFOR(i, begin, end) for(int i=(end)-1,i##_begin_=(begin);i>=i##_begin_;i--)
#define REP(i, n) FOR(i,0,n)
#define IREP(i, n) IFOR(i,0,n)
template <typename T> bool chmax(T &m, const T q) { return m < q ? (m = q, true) : false; }
template <typename T> bool chmin(T &m, const T q) { return m > q ? (m = q, true) : false; }
const std::vector<std::pair<int, int>> grid_dxs{{1, 0}, {-1, 0}, {0, 1}, {0, -1}};
int floor_lg(long long x) { return x <= 0 ? -1 : 63 - __builtin_clzll(x); }
template <class T1, class T2> T1 floor_div(T1 num, T2 den) { return (num > 0 ? num / den : -((-num + den - 1) / den)); }
template <class T1, class T2> std::pair<T1, T2> operator+(const std::pair<T1, T2> &l, const std::pair<T1, T2> &r) { return std::make_pair(l.first + r.first, l.second + r.second); }
template <class T1, class T2> std::pair<T1, T2> operator-(const std::pair<T1, T2> &l, const std::pair<T1, T2> &r) { return std::make_pair(l.first - r.first, l.second - r.second); }
template <class T> std::vector<T> sort_unique(std::vector<T> vec) { sort(vec.begin(), vec.end()), vec.erase(unique(vec.begin(), vec.end()), vec.end()); return vec; }
template <class T> int arglb(const std::vector<T> &v, const T &x) { return std::distance(v.begin(), std::lower_bound(v.begin(), v.end(), x)); }
template <class T> int argub(const std::vector<T> &v, const T &x) { return std::distance(v.begin(), std::upper_bound(v.begin(), v.end(), x)); }
template <class IStream, class T> IStream &operator>>(IStream &is, std::vector<T> &vec) { for (auto &v : vec) is >> v; return is; }
template <class OStream, class T> OStream &operator<<(OStream &os, const std::vector<T> &vec);
template <class OStream, class T, size_t sz> OStream &operator<<(OStream &os, const std::array<T, sz> &arr);
template <class OStream, class T, class TH> OStream &operator<<(OStream &os, const std::unordered_set<T, TH> &vec);
template <class OStream, class T, class U> OStream &operator<<(OStream &os, const pair<T, U> &pa);
template <class OStream, class T> OStream &operator<<(OStream &os, const std::deque<T> &vec);
template <class OStream, class T> OStream &operator<<(OStream &os, const std::set<T> &vec);
template <class OStream, class T> OStream &operator<<(OStream &os, const std::multiset<T> &vec);
template <class OStream, class T> OStream &operator<<(OStream &os, const std::unordered_multiset<T> &vec);
template <class OStream, class T, class U> OStream &operator<<(OStream &os, const std::pair<T, U> &pa);
template <class OStream, class TK, class TV> OStream &operator<<(OStream &os, const std::map<TK, TV> &mp);
template <class OStream, class TK, class TV, class TH> OStream &operator<<(OStream &os, const std::unordered_map<TK, TV, TH> &mp);
template <class OStream, class... T> OStream &operator<<(OStream &os, const std::tuple<T...> &tpl);
template <class OStream, class T> OStream &operator<<(OStream &os, const std::vector<T> &vec) { os << '['; for (auto v : vec) os << v << ','; os << ']'; return os; }
template <class OStream, class T, size_t sz> OStream &operator<<(OStream &os, const std::array<T, sz> &arr) { os << '['; for (auto v : arr) os << v << ','; os << ']'; return os; }
template <class... T> std::istream &operator>>(std::istream &is, std::tuple<T...> &tpl) { std::apply([&is](auto &&... args) { ((is >> args), ...); }, tpl); return is; }
template <class OStream, class... T> OStream &operator<<(OStream &os, const std::tuple<T...> &tpl) { os << '('; std::apply([&os](auto &&... args) { ((os << args << ','), ...); }, tpl); return os << ')'; }
template <class OStream, class T, class TH> OStream &operator<<(OStream &os, const std::unordered_set<T, TH> &vec) { os << '{'; for (auto v : vec) os << v << ','; os << '}'; return os; }
template <class OStream, class T> OStream &operator<<(OStream &os, const std::deque<T> &vec) { os << "deq["; for (auto v : vec) os << v << ','; os << ']'; return os; }
template <class OStream, class T> OStream &operator<<(OStream &os, const std::set<T> &vec) { os << '{'; for (auto v : vec) os << v << ','; os << '}'; return os; }
template <class OStream, class T> OStream &operator<<(OStream &os, const std::multiset<T> &vec) { os << '{'; for (auto v : vec) os << v << ','; os << '}'; return os; }
template <class OStream, class T> OStream &operator<<(OStream &os, const std::unordered_multiset<T> &vec) { os << '{'; for (auto v : vec) os << v << ','; os << '}'; return os; }
template <class OStream, class T, class U> OStream &operator<<(OStream &os, const std::pair<T, U> &pa) { return os << '(' << pa.first << ',' << pa.second << ')'; }
template <class OStream, class TK, class TV> OStream &operator<<(OStream &os, const std::map<TK, TV> &mp) { os << '{'; for (auto v : mp) os << v.first << "=>" << v.second << ','; os << '}'; return os; }
template <class OStream, class TK, class TV, class TH> OStream &operator<<(OStream &os, const std::unordered_map<TK, TV, TH> &mp) { os << '{'; for (auto v : mp) os << v.first << "=>" << v.second << ','; os << '}'; return os; }
#ifdef HITONANODE_LOCAL
const string COLOR_RESET = "\033[0m", BRIGHT_GREEN = "\033[1;32m", BRIGHT_RED = "\033[1;31m", BRIGHT_CYAN = "\033[1;36m", NORMAL_CROSSED = "\033[0;9;37m", RED_BACKGROUND = "\033[1;41m", NORMAL_FAINT = "\033[0;2m";
#define dbg(x) std::cerr << BRIGHT_CYAN << #x << COLOR_RESET << " = " << (x) << NORMAL_FAINT << " (L" << __LINE__ << ") " << __FILE__ << COLOR_RESET << std::endl
#define dbgif(cond, x) ((cond) ? std::cerr << BRIGHT_CYAN << #x << COLOR_RESET << " = " << (x) << NORMAL_FAINT << " (L" << __LINE__ << ") " << __FILE__ << COLOR_RESET << std::endl : std::cerr)
#else
#define dbg(x) ((void)0)
#define dbgif(cond, x) ((void)0)
#endif

// Construct block cut tree
// Complexity: O(N + M), N = |vertices|, M = |edges|
// based on noshi91's idea https://x.com/noshi91/status/1529858538650374144
// based on SSRS's implementation https://ssrs-cp.github.io/cp_library/graph/extended_block_cut_tree.hpp.html
struct extended_block_cut_trees {
    int N; // number of vertices
    int B; // number of blocks
    std::vector<std::vector<int>> to; // (0, ..., N - 1): vertices, (N, ..., N + B - 1): blocks

    extended_block_cut_trees(int N, const std::vector<std::pair<int, int>> &edges) : N(N), B(0), to(N) {
        std::vector<std::vector<int>> adjs(N);
        for (auto [u, v] : edges) adjs.at(u).push_back(v), adjs.at(v).push_back(u);

        std::vector<int> dfs_next(N, -1), dist(N, -1), back_cnt(N);

        auto rec1 = [&](auto &&self, int now) -> void {
            for (int nxt : adjs[now]) {
                if (dist[nxt] == -1) {
                    dist[nxt] = dist[now] + 1;
                    dfs_next[now] = nxt;
                    self(self, nxt);
                    back_cnt[now] += back_cnt[nxt];
                } else if (dist[nxt] < dist[now] - 1) {
                    ++back_cnt[now];
                    --back_cnt[dfs_next[nxt]];
                }
            }
        };

        for (int i = 0; i < N; ++i) {
            if (dist[i] == -1) dist[i] = 0, rec1(rec1, i);
        }

        std::vector<bool> used(N);

        auto rec2 = [&](auto &&self, int now, int current_b) -> void {
            used[now] = true;
            bool ok = false;
            for (int nxt : adjs[now]) {
                if (dist[nxt] == dist[now] + 1 and !used[nxt]) {
                    if (back_cnt[nxt] > 0) {
                        if (!ok) {
                            ok = true;
                            add_edge(now, current_b);
                        }
                        self(self, nxt, current_b);
                    } else {
                        to.push_back({});
                        ++B;
                        add_edge(now, B - 1);
                        self(self, nxt, B - 1);
                    }
                }
            }
            if (!ok and dist[now] > 0) { add_edge(now, current_b); }
        };

        for (int i = 0; i < N; ++i) {
            if (dist[i] == 0) { rec2(rec2, i, B - 1); }
            if (adjs[i].empty()) {
                to.push_back({});
                ++B;
                add_edge(i, B - 1);
            }
        }
    }

    int size() const { return N + B; }

    bool is_articulation_point(int vertex) const {
        assert(0 <= vertex and vertex < N);
        return to[vertex].size() > 1;
    }

    int block_size(int block) const {
        assert(0 <= block and block < B);
        return to[N + block].size();
    }

    const std::vector<int> &block_vertices(int block) const {
        assert(0 <= block and block < B);
        return to[N + block];
    }

    // first < N (vertices), second >= N (blocks)
    std::vector<std::pair<int, int>> get_edges() const {
        std::vector<std::pair<int, int>> edges;
        for (int i = 0; i < N; ++i) {
            for (int j : to[i]) edges.emplace_back(i, j);
        }
        return edges;
    }

private:
    void add_edge(int vertex, int block) {
        assert(0 <= vertex and vertex < N);
        assert(0 <= block and block < B);
        to[vertex].push_back(N + block);
        to[N + block].push_back(vertex);
    }
};

#include <algorithm>
#include <cassert>
#include <functional>
#include <queue>
#include <stack>
#include <utility>
#include <vector>

// Heavy-Light Decomposition of trees
// Based on http://beet-aizu.hatenablog.com/entry/2017/12/12/235950
struct HeavyLightDecomposition {
    int V;
    int k;
    int nb_heavy_path;
    std::vector<std::vector<int>> e;
    std::vector<int> par;         // par[i] = parent of vertex i (Default: -1)
    std::vector<int> depth;       // depth[i] = distance between root and vertex i
    std::vector<int> subtree_sz;  // subtree_sz[i] = size of subtree whose root is i
    std::vector<int> heavy_child; // heavy_child[i] = child of vertex i on heavy path (Default: -1)
    std::vector<int> tree_id;     // tree_id[i] = id of tree vertex i belongs to
    std::vector<int> aligned_id, aligned_id_inv; // aligned_id[i] = aligned id for vertex i (consecutive on heavy edges)
    std::vector<int> head;        // head[i] = id of vertex on heavy path of vertex i, nearest to root
    std::vector<int> head_ids;    // consist of head vertex id's
    std::vector<int> heavy_path_id; // heavy_path_id[i] = heavy_path_id for vertex [i]

    HeavyLightDecomposition(int sz = 0)
        : V(sz), k(0), nb_heavy_path(0), e(sz), par(sz), depth(sz), subtree_sz(sz), heavy_child(sz),
          tree_id(sz, -1), aligned_id(sz), aligned_id_inv(sz), head(sz), heavy_path_id(sz, -1) {}

    void add_edge(int u, int v) {
        e[u].emplace_back(v);
        e[v].emplace_back(u);
    }

    void _build_dfs(int root) {
        std::stack<std::pair<int, int>> st;
        par[root] = -1;
        depth[root] = 0;
        st.emplace(root, 0);
        while (!st.empty()) {
            int now = st.top().first;
            int &i = st.top().second;
            if (i < (int)e[now].size()) {
                int nxt = e[now][i++];
                if (nxt == par[now]) continue;
                par[nxt] = now;
                depth[nxt] = depth[now] + 1;
                st.emplace(nxt, 0);
            } else {
                st.pop();
                int max_sub_sz = 0;
                subtree_sz[now] = 1;
                heavy_child[now] = -1;
                for (auto nxt : e[now]) {
                    if (nxt == par[now]) continue;
                    subtree_sz[now] += subtree_sz[nxt];
                    if (max_sub_sz < subtree_sz[nxt]) max_sub_sz = subtree_sz[nxt], heavy_child[now] = nxt;
                }
            }
        }
    }

    void _build_bfs(int root, int tree_id_now) {
        std::queue<int> q({root});
        while (!q.empty()) {
            int h = q.front();
            q.pop();
            head_ids.emplace_back(h);
            for (int now = h; now != -1; now = heavy_child[now]) {
                tree_id[now] = tree_id_now;
                aligned_id[now] = k++;
                aligned_id_inv[aligned_id[now]] = now;
                heavy_path_id[now] = nb_heavy_path;
                head[now] = h;
                for (int nxt : e[now])
                    if (nxt != par[now] and nxt != heavy_child[now]) q.push(nxt);
            }
            nb_heavy_path++;
        }
    }

    void build(std::vector<int> roots = {0}) {
        int tree_id_now = 0;
        for (auto r : roots) _build_dfs(r), _build_bfs(r, tree_id_now++);
    }

    template <class T> std::vector<T> segtree_rearrange(const std::vector<T> &data) const {
        assert(int(data.size()) == V);
        std::vector<T> ret;
        ret.reserve(V);
        for (int i = 0; i < V; i++) ret.emplace_back(data[aligned_id_inv[i]]);
        return ret;
    }

    // query for vertices on path [u, v] (INCLUSIVE)
    void for_each_vertex(int u, int v, const std::function<void(int ancestor, int descendant)> &f) const {
        while (true) {
            if (aligned_id[u] > aligned_id[v]) std::swap(u, v);
            f(std::max(aligned_id[head[v]], aligned_id[u]), aligned_id[v]);
            if (head[u] == head[v]) break;
            v = par[head[v]];
        }
    }

    void for_each_vertex_noncommutative(
        int from, int to, const std::function<void(int ancestor, int descendant)> &fup,
        const std::function<void(int ancestor, int descendant)> &fdown) const {
        int u = from, v = to;
        const int lca = lowest_common_ancestor(u, v), dlca = depth[lca];
        while (u >= 0 and depth[u] > dlca) {
            const int p = (depth[head[u]] > dlca ? head[u] : lca);
            fup(aligned_id[p] + (p == lca), aligned_id[u]), u = par[p];
        }
        static std::vector<std::pair<int, int>> lrs;
        int sz = 0;
        while (v >= 0 and depth[v] >= dlca) {
            const int p = (depth[head[v]] >= dlca ? head[v] : lca);
            if (int(lrs.size()) == sz) lrs.emplace_back(0, 0);
            lrs.at(sz++) = {p, v}, v = par.at(p);
        }
        while (sz--) fdown(aligned_id[lrs.at(sz).first], aligned_id[lrs.at(sz).second]);
    }

    // query for edges on path [u, v]
    void for_each_edge(int u, int v, const std::function<void(int, int)> &f) const {
        while (true) {
            if (aligned_id[u] > aligned_id[v]) std::swap(u, v);
            if (head[u] != head[v]) {
                f(aligned_id[head[v]], aligned_id[v]);
                v = par[head[v]];
            } else {
                if (u != v) f(aligned_id[u] + 1, aligned_id[v]);
                break;
            }
        }
    }

    // lowest_common_ancestor: O(log V)
    int lowest_common_ancestor(int u, int v) const {
        assert(tree_id[u] == tree_id[v] and tree_id[u] >= 0);
        while (true) {
            if (aligned_id[u] > aligned_id[v]) std::swap(u, v);
            if (head[u] == head[v]) return u;
            v = par[head[v]];
        }
    }

    int distance(int u, int v) const {
        assert(tree_id[u] == tree_id[v] and tree_id[u] >= 0);
        return depth[u] + depth[v] - 2 * depth[lowest_common_ancestor(u, v)];
    }

    // Level ancestor, O(log V)
    // if k-th parent is out of range, return -1
    int kth_parent(int v, int k) const {
        if (k < 0) return -1;
        while (v >= 0) {
            int h = head.at(v), len = depth.at(v) - depth.at(h);
            if (k <= len) return aligned_id_inv.at(aligned_id.at(v) - k);
            k -= len + 1, v = par.at(h);
        }
        return -1;
    }

    // Jump on tree, O(log V)
    int s_to_t_by_k_steps(int s, int t, int k) const {
        if (k < 0) return -1;
        if (k == 0) return s;
        int lca = lowest_common_ancestor(s, t);
        if (k <= depth.at(s) - depth.at(lca)) return kth_parent(s, k);
        return kth_parent(t, depth.at(s) + depth.at(t) - depth.at(lca) * 2 - k);
    }
};

#include <atcoder/fenwicktree>

int main() {
    int N, M;
    cin >> N >> M;
    vector<pair<int, int>> edges(M);
    for (auto &[u, v] : edges) { cin >> u >> v, --u, --v; }

    const extended_block_cut_trees bct(N, edges);

    HeavyLightDecomposition hld(bct.size());
    for (auto [i, j] : bct.get_edges()) hld.add_edge(i, j);
    hld.build();

    atcoder::fenwick_tree<int> fw(hld.V);
    for (int i = 0; i < N; ++i) fw.add(hld.aligned_id[i], 1);

    int Q;
    cin >> Q;
    while (Q--) {
        int u, v;
        cin >> u >> v;
        --u, --v;
        int ret = 0;
        if (u != v) {
            ret = -2;
            hld.for_each_vertex(u, v, [&](int a, int b) { ret += fw.sum(a, b + 1); });
        }
        cout << ret << '\n';
    }
}
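How the queries are answered, stated as my reading of the code rather than the author's own commentary: in the extended block cut tree, the vertices every u-v walk in the original graph must pass through are exactly the original-graph vertices lying strictly between u and v on the tree path. The Fenwick tree assigns weight 1 to the HLD position of every original vertex and weight 0 to every block node, so summing over the path segments produced by `for_each_vertex` counts the original vertices on the path including both endpoints, and starting from `ret = -2` removes u and v themselves.

A hand-checked toy case (not an official sample) in the input format read by `main`: N = 4, M = 4 with edges (1,2), (2,3), (3,1), (3,4), followed by Q = 1 and the query (1, 4). The block cut tree has blocks {1,2,3} and {3,4}, so the path from 1 to 4 visits the original vertices 1, 3, 4; the program prints 3 - 2 = 1, matching the fact that vertex 3 is the only vertex whose removal separates 1 from 4.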