Result

| Problem | No.1479 Matrix Eraser |
|---|---|
| User | ngtkana |
| Submitted at | 2021-03-06 00:08:43 |
| Language | Rust (1.83.0 + proconio) |
| Result | RE |
| Execution time | - |
| Code length | 13,785 bytes |
| Compile time | 12,410 ms |
| Compile memory | 377,904 KB |
| Runtime memory | 10,496 KB |
| Last judged | 2024-10-07 14:50:08 |
| Total judge time | 14,591 ms |
| Judge server ID (informational) | judge4 / judge1 |

| File pattern | Result |
|---|---|
| sample | AC * 2 |
| other | AC * 18 RE * 21 |
Compile Messages
warning: unused imports: `Leaf`, `Tuple`, `VecLen`
--> src/main.rs:170:27
|
170 | multi_token::{Leaf, Parser, ParserTuple, RawTuple, Tuple, VecLen},
| ^^^^ ^^^^^ ^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
warning: unused import: `with_str`
--> src/main.rs:426:35
|
426 | pub use self::i::{with_stdin, with_str};
| ^^^^^^^^
warning: unused imports: `ParserTuple`, `Parser`, `RawTuple`, `Token`, `Usize1`
--> src/main.rs:429:28
|
429 | pub use super::i::{Parser, ParserTuple, RawTuple, Token, Usize1};
| ^^^^^^ ^^^^^^^^^^^ ^^^^^^^^ ^^^^^ ^^^^^^
Source Code
#[allow(unused_imports)]
#[cfg(feature = "dbg")]
use dbg::lg;
use hopkarp::hopkarp;
const MAX: usize = 100_000;
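// Cells are bucketed by value; for each nonzero value, the row/column indices
// of its cells form a bipartite graph whose maximum matching size equals, by
// König's theorem, the minimum number of rows and columns needed to cover
// those cells. The answer is the sum of these sizes.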
fn main() {
let mut buf = ngtio::with_stdin();
let h = buf.usize();
let w = buf.usize();
let mut a = vec![Vec::new(); MAX];
for i in 0..h {
for j in 0..w {
a[buf.usize()].push([i, j]);
}
}
let mut ans = 0;
for v in a[1..].iter().rev().filter(|v| !v.is_empty()) {
let mut g = vec![Vec::new(); h];
v.iter().for_each(|&[i, j]| g[i].push(j));
let aug = hopkarp(w, &g).count;
ans += aug;
}
println!("{}", ans);
}
// hopkarp {{{
#[allow(dead_code)]
mod hopkarp {
use std::collections::VecDeque;
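// Output of the Hopcroft–Karp routine: `count` is the size of a maximum
// matching, `forward[x]` / `backward[y]` hold the matched partner of left
// vertex x / right vertex y (if any), and `left` / `right` flag the vertices
// reachable from unmatched left vertices along alternating paths, from which
// a minimum vertex cover can be read off via König's theorem.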
#[derive(Clone, Debug, Default, Hash, PartialEq)]
pub struct HopkarpResult {
pub count: usize,
pub forward: Box<[Option<usize>]>,
pub backward: Box<[Option<usize>]>,
pub left: Box<[bool]>,
pub right: Box<[bool]>,
}
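// Computes a maximum matching of the bipartite graph with left vertices
// 0..graph.len() and right vertices 0..w, where graph[x] lists the right
// vertices adjacent to left vertex x.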
pub fn hopkarp(w: usize, graph: &[Vec<usize>]) -> HopkarpResult {
let h = graph.len();
let mut forward = vec![None; h].into_boxed_slice();
let mut backward = vec![None; w].into_boxed_slice();
let (left, right) = loop {
let dist = bfs(graph, &forward, &backward);
if !dfs(&graph, &dist, &mut forward, &mut backward) {
break construct_minimum_cut(graph, &dist, &backward);
}
};
let count = forward.iter().filter(|b| b.is_some()).count();
HopkarpResult {
count,
forward,
backward,
left,
right,
}
}
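// Once no augmenting path remains, the left vertices at finite BFS distance,
// together with the right vertices they reach through non-matching edges,
// are exactly the vertices reachable from unmatched left vertices; they
// become the `left` / `right` flags of the result.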
fn construct_minimum_cut(
graph: &[Vec<usize>],
dist: &[u32],
backward: &[Option<usize>],
) -> (Box<[bool]>, Box<[bool]>) {
use std::u32::MAX;
let left = dist
.iter()
.map(|&x| x != MAX)
.collect::<Vec<_>>()
.into_boxed_slice();
let mut right = vec![false; backward.len()].into_boxed_slice();
for x in left.iter().enumerate().filter(|&(_, &b)| b).map(|(x, _)| x) {
graph[x]
.iter()
.copied()
.filter(|&y| backward[y] != Some(x))
.for_each(|y| right[y] = true);
}
(left, right)
}
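// Augmentation phase: walk the level graph computed by `bfs`, flipping edges
// along vertex-disjoint shortest augmenting paths. Returns true if at least
// one augmenting path was found.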
fn dfs(
graph: &[Vec<usize>],
dist: &[u32],
forward: &mut [Option<usize>],
backward: &mut [Option<usize>],
) -> bool {
fn rec(
x: usize,
graph: &[Vec<usize>],
dist: &[u32],
used: &mut [bool],
forward: &mut [Option<usize>],
backward: &mut [Option<usize>],
) -> bool {
used[x] = true;
for &y in &graph[x] {
let found = if let Some(z) = backward[y] {
!used[z]
&& dist[x] + 1 == dist[z]
&& rec(z, graph, dist, used, forward, backward)
} else {
true
};
if found {
backward[y] = Some(x);
forward[x] = Some(y);
return true;
}
}
false
}
let mut has_aug = false;
let mut used = vec![false; forward.len()];
for x in 0..used.len() {
if forward[x].is_none() {
has_aug |= rec(x, graph, dist, &mut used, forward, backward);
}
}
has_aug
}
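// Level phase: BFS over alternating paths from every unmatched left vertex,
// assigning each left vertex its distance (u32::MAX if unreachable).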
fn bfs(
graph: &[Vec<usize>],
forward: &[Option<usize>],
backward: &[Option<usize>],
) -> Vec<u32> {
use std::u32::MAX;
let mut dist = vec![MAX; forward.len()];
let mut queue = forward
.iter()
.enumerate()
.filter(|&(_, b)| b.is_none())
.map(|(i, _)| i)
.inspect(|&i| dist[i] = 0)
.collect::<VecDeque<_>>();
while let Some(x) = queue.pop_front() {
for &y in &graph[x] {
if let Some(z) = backward[y] {
if dist[z] == MAX {
dist[z] = dist[x] + 1;
queue.push_back(z);
}
}
}
}
dist
}
}
// }}}
// template {{{
#[cfg(not(feature = "dbg"))]
#[allow(unused_macros)]
#[macro_export]
macro_rules! lg {
($($expr:expr),*) => {};
}
#[allow(dead_code)]
mod ngtio {
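// Whitespace-separated token reader over any BufRead source (stdin or an
// in-memory &str), with typed parsers for primitives, Usize1 (1-indexed
// input), tuples, and nested vectors.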
mod i {
use std::{
io::{self, BufRead},
iter,
};
pub use self::{
multi_token::{Leaf, Parser, ParserTuple, RawTuple, Tuple, VecLen},
token::{Token, Usize1},
};
pub fn with_stdin() -> Tokenizer<io::BufReader<io::Stdin>> {
io::BufReader::new(io::stdin()).tokenizer()
}
pub fn with_str(src: &str) -> Tokenizer<&[u8]> {
src.as_bytes().tokenizer()
}
pub struct Tokenizer<S: BufRead> {
queue: Vec<String>, // FIXME: Keeping just a single String would probably be faster.
scanner: S,
}
macro_rules! prim_method {
($name:ident: $T:ty) => {
pub fn $name(&mut self) -> $T {
<$T>::leaf().parse(self)
}
};
($name:ident) => {
prim_method!($name: $name);
};
}
macro_rules! prim_methods {
($name:ident: $T:ty; $($rest:tt)*) => {
prim_method!($name:$T);
prim_methods!($($rest)*);
};
($name:ident; $($rest:tt)*) => {
prim_method!($name);
prim_methods!($($rest)*);
};
() => ()
}
impl<S: BufRead> Tokenizer<S> {
pub fn token(&mut self) -> String {
self.load();
self.queue.pop().expect("The input has already ended.")
}
pub fn new(scanner: S) -> Self {
Self {
queue: Vec::new(),
scanner,
}
}
fn load(&mut self) {
while self.queue.is_empty() {
let mut s = String::new();
let length = self.scanner.read_line(&mut s).unwrap(); // This apparently errors when the input is not valid UTF-8.
if length == 0 {
break;
}
self.queue = s.split_whitespace().rev().map(str::to_owned).collect();
}
}
pub fn skip_line(&mut self) {
assert!(
self.queue.is_empty(),
"行の途中で呼ばないでいただきたいです。現在のトークンキュー: {:?}",
&self.queue
);
self.load();
}
pub fn end(&mut self) {
self.load();
assert!(self.queue.is_empty(), "There is still input left!");
}
pub fn parse<T: Token>(&mut self) -> T::Output {
T::parse(&self.token())
}
pub fn parse_collect<T: Token, B>(&mut self, n: usize) -> B
where
B: iter::FromIterator<T::Output>,
{
iter::repeat_with(|| self.parse::<T>()).take(n).collect()
}
pub fn tuple<T: RawTuple>(&mut self) -> <T::LeafTuple as Parser>::Output {
T::leaf_tuple().parse(self)
}
pub fn vec<T: Token>(&mut self, len: usize) -> Vec<T::Output> {
T::leaf().vec(len).parse(self)
}
pub fn vec_tuple<T: RawTuple>(
&mut self,
len: usize,
) -> Vec<<T::LeafTuple as Parser>::Output> {
T::leaf_tuple().vec(len).parse(self)
}
pub fn vec2<T: Token>(&mut self, height: usize, width: usize) -> Vec<Vec<T::Output>> {
T::leaf().vec(width).vec(height).parse(self)
}
pub fn vec2_tuple<T>(
&mut self,
height: usize,
width: usize,
) -> Vec<Vec<<T::LeafTuple as Parser>::Output>>
where
T: RawTuple,
{
T::leaf_tuple().vec(width).vec(height).parse(self)
}
prim_methods! {
u8; u16; u32; u64; u128; usize;
i8; i16; i32; i64; i128; isize;
f32; f64;
char; string: String;
}
}
mod token {
use super::multi_token::Leaf;
use std::{any, fmt, marker, str};
pub trait Token: Sized {
type Output;
fn parse(s: &str) -> Self::Output;
fn leaf() -> Leaf<Self> {
Leaf(marker::PhantomData)
}
}
impl<T> Token for T
where
T: str::FromStr,
<T as str::FromStr>::Err: fmt::Debug,
{
type Output = T;
fn parse(s: &str) -> Self::Output {
s.parse().unwrap_or_else(|_| {
panic!("Parse error!: ({}: {})", s, any::type_name::<T>(),)
})
}
}
pub struct Usize1 {}
impl Token for Usize1 {
type Output = usize;
fn parse(s: &str) -> Self::Output {
usize::parse(s)
.checked_sub(1)
.expect("Parse error! (Zero substruction error of Usize1)")
}
}
}
mod multi_token {
use super::{Token, Tokenizer};
use std::{io::BufRead, iter, marker};
pub trait Parser: Sized {
type Output;
fn parse<S: BufRead>(&self, server: &mut Tokenizer<S>) -> Self::Output;
fn vec(self, len: usize) -> VecLen<Self> {
VecLen { len, elem: self }
}
}
pub struct Leaf<T>(pub(super) marker::PhantomData<T>);
impl<T: Token> Parser for Leaf<T> {
type Output = T::Output;
fn parse<S: BufRead>(&self, server: &mut Tokenizer<S>) -> T::Output {
server.parse::<T>()
}
}
pub struct VecLen<T> {
pub len: usize,
pub elem: T,
}
impl<T: Parser> Parser for VecLen<T> {
type Output = Vec<T::Output>;
fn parse<S: BufRead>(&self, server: &mut Tokenizer<S>) -> Self::Output {
iter::repeat_with(|| self.elem.parse(server))
.take(self.len)
.collect()
}
}
pub trait RawTuple {
type LeafTuple: Parser;
fn leaf_tuple() -> Self::LeafTuple;
}
pub trait ParserTuple {
type Tuple: Parser;
fn tuple(self) -> Self::Tuple;
}
pub struct Tuple<T>(pub T);
macro_rules! impl_tuple {
($($t:ident: $T:ident),*) => {
impl<$($T),*> Parser for Tuple<($($T,)*)>
where
$($T: Parser,)*
{
type Output = ($($T::Output,)*);
#[allow(unused_variables)]
fn parse<S: BufRead >(&self, server: &mut Tokenizer<S>) -> Self::Output {
match self {
Tuple(($($t,)*)) => {
($($t.parse(server),)*)
}
}
}
}
impl<$($T: Token),*> RawTuple for ($($T,)*) {
type LeafTuple = Tuple<($(Leaf<$T>,)*)>;
fn leaf_tuple() -> Self::LeafTuple {
Tuple(($($T::leaf(),)*))
}
}
impl<$($T: Parser),*> ParserTuple for ($($T,)*) {
type Tuple = Tuple<($($T,)*)>;
fn tuple(self) -> Self::Tuple {
Tuple(self)
}
}
};
}
impl_tuple!();
impl_tuple!(t1: T1);
impl_tuple!(t1: T1, t2: T2);
impl_tuple!(t1: T1, t2: T2, t3: T3);
impl_tuple!(t1: T1, t2: T2, t3: T3, t4: T4);
impl_tuple!(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5);
impl_tuple!(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6);
impl_tuple!(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7);
impl_tuple!(
t1: T1,
t2: T2,
t3: T3,
t4: T4,
t5: T5,
t6: T6,
t7: T7,
t8: T8
);
}
trait Scanner: BufRead + Sized {
fn tokenizer(self) -> Tokenizer<Self> {
Tokenizer::new(self)
}
}
impl<R: BufRead> Scanner for R {}
}
pub use self::i::{with_stdin, with_str};
pub mod prelude {
pub use super::i::{Parser, ParserTuple, RawTuple, Token, Usize1};
}
}
// }}}
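
For reference, the following is a minimal usage sketch (not part of the submission; it assumes the test module were appended to the same main.rs) showing how the `ngtio` tokenizer and the `hopkarp` matching routine above fit together on a tiny bipartite graph:

```rust
// Hypothetical sanity check, not part of the submitted code: parse "h w" from
// an in-memory string and compute a maximum matching on a 2x2 bipartite graph.
#[cfg(test)]
mod usage_sketch {
    #[test]
    fn tiny_matching() {
        let mut buf = crate::ngtio::with_str("2 2\n");
        let h = buf.usize();
        let w = buf.usize();
        // Left vertex 0 is adjacent to right vertices {0, 1}; left vertex 1 to {1}.
        let graph = vec![vec![0, 1], vec![1]];
        assert_eq!(graph.len(), h);
        // The unique maximum matching pairs (0, 0) and (1, 1), so its size is 2.
        let result = crate::hopkarp::hopkarp(w, &graph);
        assert_eq!(result.count, 2);
    }
}
```

Here `with_str` stands in for `with_stdin`, so the parsing path can be exercised without interactive input.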