Result

Problem: No.1300 Sum of Inversions
User: yakamotoyakamoto
Submitted at: 2020-11-27 22:05:55
Language: C++17 (gcc 12.3.0 + boost 1.83.0)
Result: WA
Execution time: -
Code length: 6,970 bytes
Compile time: 2,451 ms
Compile memory: 217,796 KB
Runtime memory: 18,648 KB
Last judged at: 2023-10-01 06:02:45
Total judge time: 6,668 ms
Judge server ID (for reference): judge11 / judge13

Test cases

Test case    Result  Time    Memory
testcase_00  AC      2 ms    4,380 KB
testcase_01  AC      2 ms    4,376 KB
testcase_02  AC      1 ms    4,376 KB
testcase_03  WA      -
testcase_04  WA      -
testcase_05  WA      -
testcase_06  WA      -
testcase_07  WA      -
testcase_08  WA      -
testcase_09  WA      -
testcase_10  WA      -
testcase_11  WA      -
testcase_12  WA      -
testcase_13  WA      -
testcase_14  WA      -
testcase_15  WA      -
testcase_16  WA      -
testcase_17  WA      -
testcase_18  WA      -
testcase_19  WA      -
testcase_20  WA      -
testcase_21  WA      -
testcase_22  WA      -
testcase_23  WA      -
testcase_24  WA      -
testcase_25  WA      -
testcase_26  WA      -
testcase_27  WA      -
testcase_28  WA      -
testcase_29  WA      -
testcase_30  WA      -
testcase_31  WA      -
testcase_32  WA      -
testcase_33  AC      57 ms   18,592 KB
testcase_34  AC      71 ms   18,648 KB
testcase_35  WA      -
testcase_36  WA      -

Source code


/**
 * code generated by JHelper
 * More info: https://github.com/AlexeyDmitriev/JHelper
 * @author
 */


#ifndef SOLUTION_COMMON_H

#include <bits/stdc++.h>

using namespace std;

using ll = long long;
using Pii = pair<int, int>;
template<typename T> using V = vector<T>;
using Vi = V<int>;
#define _1 first
#define _2 second
#define all(x) x.begin(), x.end()
#define pb push_back
#define lb lower_bound
#define amax(a, b) a = max(a, b)
#define amin(a, b) a = min(a, b)
#define tmax(_next, _prev, expr) if (_prev != INF) { auto prev = _prev; amax(_next, expr); }
#define tmin(_next, _prev, expr) if (_prev != INF) { auto prev = _prev; amin(_next, expr); }
#define dim2(a, b, init) vector(a, vector(b, init))
#define dim3(a, b, c, init) vector(a, vector(b, vector(c, init)))
#define dim4(a, b, c, d, init) vector(a, vector(b, vector(c, vector(d, init))))

#ifndef M_PI
static const double M_PI = acos(-1.0);
#endif

#ifdef MY_DEBUG
# define DEBUG(x) x
const bool isDebug = true;
#else
# define DEBUG(x)
const bool isDebug = false;
#endif

template<class A, class B>
std::ostream & operator <<(ostream &os, const pair<A, B> &p) {
  os << "(" << p._1 << "," << p._2 << ")";
  return os;
}

void __print(int x) {cerr << x;}
void __print(long x) {cerr << x;}
void __print(long long x) {cerr << x;}
void __print(unsigned x) {cerr << x;}
void __print(unsigned long x) {cerr << x;}
void __print(unsigned long long x) {cerr << x;}
void __print(float x) {cerr << x;}
void __print(double x) {cerr << x;}
void __print(long double x) {cerr << x;}
void __print(char x) {cerr << '\'' << x << '\'';}
void __print(const char *x) {cerr << '\"' << x << '\"';}
void __print(const string &x) {cerr << '\"' << x << '\"';}
void __print(bool x) {cerr << (x ? "true" : "false");}
void __print(V<bool> x) {for (auto i : x) cerr << i;}

template<typename T, typename V>
void __print(const pair<T, V> &x) {cerr << '('; __print(x.first); cerr << ','; __print(x.second); cerr << ')';}
template<typename T>
void __print(const T &x) {int f = 0; cerr << '{'; for (auto const &i: x) cerr << (f++ ? "," : ""), __print(i); cerr << "}";}
void _print() {cerr << "]\n";}
template <typename T, typename... V>
void _print(T t, V... v) {__print(t); if (sizeof...(v)) cerr << ", "; _print(v...);}
#ifdef MY_DEBUG
#define debug(x...) cerr << "[" << #x << "] = ["; _print(x)
#else
#define debug(x...)
#endif
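
// When MY_DEBUG is defined, debug(a, b, ...) prints "[a, b, ...] = [<values>]"
// to stderr via the __print overloads above; otherwise it expands to nothing.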


template<class T>
string join(V<T> &A, string delimiter = " ") {
  ostringstream os;
  for (int i = 0; i < A.size(); ++i) {
    if (i > 0) os << delimiter;
    os << A[i];
  }
  return os.str();
}

template <typename T>
istream& operator>>(istream& in, vector<T> &A) {
  for (int i = 0; i < A.size(); i++) {
    in >> A[i];
  }
  return in;
}

template <typename T = int>
tuple<V<T>, V<T>> na2(istream& in, int N, int add = 0) {
  auto res = make_tuple(V<T>(N), V<T>(N));
  for (int i = 0; i < N; ++i) {
    in >> get<0>(res)[i] >> get<1>(res)[i];
    get<0>(res)[i] += add;
    get<1>(res)[i] += add;
  }
  return res;
}


template <typename T = int>
V<V<T>> nm(istream& in, int N, int M, int add = 0) {
  auto res = dim2(N, M, 0);
  for (int i = 0; i < N; ++i) {
    in >> res[i];
    if (add) {
      for (auto &a : res[i]) {
        a += add;
      }
    }
  }
  return res;
}

template <typename T>
inline T floorDiv(T num, T d) {
  if (num >= 0) {
    return num / d;
  } else {
    T res = num / d;
    if (num % d) --res;
    return res;
  }
}

template<typename T>
inline T min2(T a, T b) {
  return min(a, b);
}
template<typename T>
inline T max2(T a, T b) {
  return max(a, b);
}


#define SOLUTION_COMMON_H

#endif //SOLUTION_COMMON_H


template<typename T = int>
class BIT {
  const T zero = 0;

  int n;
  int N;
  V<T> bit;
  // smallest power of two >= x, used as the size of the Fenwick array
  int calcN(int x) {
    int k = 1 << (31 - __builtin_clz(x));
    return k == x ? k : k << 1;
  }


public:
  BIT(int n): n(n), N(calcN(n)), bit(N + 1, zero) {}

  // point update: add x at 0-indexed position i
  void add(int i, T x) {
    i++;
    while(i <= N) {
      bit[i] = bit[i] + x;
      i += i & -i;
    }
  }

  /**
   * [l, r)
   */
  T query(int l, int r) {
    return sumUntil(r) - sumUntil(l);
  }

  T get(int i) {
    return sumUntil(i + 1) - sumUntil(i);
  }

  // prefix sum over 0-indexed positions [0, i)
  T sumUntil(int i) {
    T ans = zero;
    while(i > 0) {
      ans += bit[i];
      i -= i & -i;
    }
    return ans;
  }

  // smallest 0-indexed position p with sumUntil(p + 1) >= x (assumes non-negative entries)
  int lower_bound(T x) {
    int k = N;
    int res = 0;
    while(k > 0) {
      if (res + k <= N && bit[res + k] < x) {
        x -= bit[res + k];
        res += k;
      }
      k /= 2;
    }
    return res;
  }
};
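
// Usage sketch (illustrative only):
//   BIT<ll> b(8);
//   b.add(3, 5);               // point update at 0-indexed position 3
//   ll s = b.query(0, 4);      // sum over [0, 4) == 5
//   int p = b.lower_bound(5);  // first position where the prefix sum reaches 5, here 3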

const int MOD = 998244353;
#ifndef MInt_H

template <unsigned int MOD>
class MInt {
private:
  int v;

public:
  MInt() : v(0) {}

  MInt(long long x) {
    v = x % MOD;
    if (v < 0) v += MOD;
  }

  MInt& operator +=(const MInt &that) {
    v += that.v;
    if (v >= MOD) v -= MOD;
    return *this;
  }

  MInt& operator -=(const MInt &that) {
    v -= that.v;
    if (v < 0) v += MOD;
    return *this;
  }

  MInt& operator *=(const MInt &that) {
    v = (long long)(v) * that.v % MOD;
    return *this;
  }

  MInt& operator ++(int) {
    *this += 1;
    return *this;
  }

  MInt& operator --(int) {
    *this -= 1;
    return *this;
  }

  friend MInt operator+(const MInt& a, const MInt& b) {
    return MInt(a) += b;
  }

  friend MInt operator-(const MInt& a, const MInt& b) {
    return MInt(a) -= b;
  }

  friend MInt operator*(const MInt& a, const MInt& b) {
    return MInt(a) *= b;
  }

  friend std::ostream& operator<<(std::ostream& out, const MInt &a) {
    out << a.v;
    return out;
  }
};

template<unsigned int MOD> void __print(MInt<MOD> x) {std::cerr << x;}

#define MInt_H

#endif //MInt_H
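
// Usage sketch (illustrative only): every operation reduces modulo MOD.
//   MInt<998244353> a = 123456789, b = 987654321;
//   std::cout << a * b;  // prints 123456789LL * 987654321 % 998244353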
using mint = MInt<MOD>;
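
// Approach implemented below: treat each index j as the middle of a strictly
// decreasing triple A_i > A_j > A_k (i < j < k). Two Fenwick-tree passes fill
// lt[j] = (count, sum) of larger values to its left and gt[j] = (count, sum)
// of smaller values to its right; the answer accumulates, per j,
//   lt.count * gt.count * A_j + lt.sum * gt.count + lt.count * gt.sum.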

class C {
public:
  void solve(std::istream& in, std::ostream& out) {
    ios::sync_with_stdio(false);
    cin.tie(nullptr);

    int N;
    in >> N;
    V<ll> Aori(N);
    in >> Aori;
    V<Pii> A;
    for (int i = 0; i < N; ++i) {
      A.pb({Aori[i], i});  // (value, original index)
    }
    sort(all(A), greater<>());
    debug(A);
    V<pair<ll, ll>> lt(N), gt(N);      // lt[j]: larger values left of j, gt[j]: smaller values right of j (count, sum)
    BIT bitLt(N), bitGt(N);            // counts, indexed by position
    BIT<ll> bitLtSum(N), bitGtSum(N);  // value sums, indexed by position
    // pass 1: decreasing value order; a prefix query over indices < j
    // counts/sums the strictly larger values to the left of j
    for (const auto &i : A) {
      int cntLt = bitLt.sumUntil(i._2);
      int sumLt = bitLtSum.sumUntil(i._2);
      debug(i, cntLt, sumLt);
      lt[i._2]._1 = cntLt;
      lt[i._2]._2 = sumLt;
      bitLt.add(i._2, 1);
      bitLtSum.add(i._2, i._1);
    }
    sort(all(A));
    // pass 2: increasing value order; a suffix query over indices > j
    // counts/sums the strictly smaller values to the right of j
    for (const auto &i : A) {
      int cntGt = bitGt.sumUntil(N) - bitGt.sumUntil(i._2);
      int sumGt = bitGtSum.sumUntil(N) - bitGtSum.sumUntil(i._2);
      debug(i, cntGt, sumGt);
      gt[i._2]._1 = cntGt;
      gt[i._2]._2 = sumGt;
      bitGt.add(i._2, 1);
      bitGtSum.add(i._2, i._1);
    }
    debug(lt, gt);
    mint ans = 0;
    for (int i = 0; i < N; ++i) {
      // with A[i] as the middle element there are lt[i].count * gt[i].count
      // triples; the middle value is added once per triple and each left/right
      // partner once per choice on the opposite side
      ll cnt = lt[i]._1*gt[i]._1;
      debug(i, cnt);
      ans += cnt*Aori[i] + lt[i]._2*gt[i]._1 + lt[i]._1*gt[i]._2;
    }
    out << ans << endl;
  }
};


int main() {
	C solver;
	std::istream& in(std::cin);
	std::ostream& out(std::cout);
	solver.solve(in, out);
	return 0;
}