#include <algorithm>
#include <array>
#include <chrono>
#include <cmath>
#include <cstdint>
#include <deque>
#include <iostream>
#include <limits>
#include <numeric>
#include <queue>
#include <string>
#include <utility>
#include <vector>
using namespace std;

/* yukicoder interactive: 30 parallel Hit&Blow
   - Keep candidate set (|S|=30240) of still-possible strings.
   - Maintain for each past query q_j the histogram counts for the *remaining*
     unknown strings.
   - Estimate existence probabilities via iterative proportional fitting
     (IPF / raking) on the exponential-family model
         w(x) ∝ exp(sum_j theta_{j,HB(q_j,x)}).
     Query the element with maximum estimated existence probability.
   Extra time (up to 5s) is spent on:
   - More IPF sweeps (near-converged marginals)
   - Pairwise feasibility pruning using 2-query flow constraints: given two
     queries A,B, a remaining solution must realize both histograms
     simultaneously. For each (HB(A,x), HB(B,x)) type that cannot appear in
     ANY feasible subset, we can safely discard all candidates of that type.
   - Tight-bucket forcing: if for some query/category, remaining candidates
     count == required count, all of them are forced to exist (we push them
     to a "forced" queue).
   The program auto-throttles computation based on elapsed time. */

static constexpr int DIG = 10;        // alphabet: digits 0..9
static constexpr int LEN = 5;         // code length (distinct digits)
static constexpr int NALL = 30240;    // 10*9*8*7*6 permutations
static constexpr int K = 21;          // number of valid (hit,blow) pairs
static int HB_ID[LEN + 1][LEN + 1];   // (hit,blow) -> compact id in [0,K)
static int ID_50;                     // HB_ID[5][0], the "exact match" id

// One candidate code: its digits plus a 10-bit set of used digits
// (the digit set lets us compute hits+blows with a single popcount).
struct Code {
    array<uint8_t, LEN> d;
    uint16_t mask; // 10-bit digit set
};

static inline int popcnt10(uint16_t x) { return __builtin_popcount((unsigned)x); }

static vector<Code> ALL;       // all NALL candidate codes
static vector<string> ALL_STR; // their textual form, index-aligned with ALL

// Compact (hit,blow) category of ALL[a_idx] judged against ALL[b_idx].
// blows = |shared digits| - hits, so only one positional scan is needed.
static inline uint8_t hb_id(int a_idx, int b_idx) {
    const auto &A = ALL[a_idx];
    const auto &B = ALL[b_idx];
    int hits = 0;
    for (int i = 0; i < LEN; i++)
        if (A.d[i] == B.d[i]) hits++;
    const int shared = popcnt10(A.mask & B.mask); // hits + blows
    return (uint8_t)HB_ID[hits][shared - hits];
}

// Plain Dinic max-flow on a tiny bipartite network (K supplies, K demands).
struct Dinic {
    struct Edge { int to, rev, cap; };
    int n;
    vector<vector<Edge>> g;
    vector<int> level, it;
    Dinic(int n_ = 0) { init(n_); }
    void init(int n_) {
        n = n_;
        g.assign(n, {});
        level.assign(n, 0);
        it.assign(n, 0);
    }
    void add_edge(int fr, int to, int cap) {
        Edge a{to, (int)g[to].size(), cap};
        Edge b{fr, (int)g[fr].size(), 0}; // reverse edge, zero capacity
        g[fr].push_back(a);
        g[to].push_back(b);
    }
    bool bfs(int s, int t) {
        fill(level.begin(), level.end(), -1);
        queue<int> q;
        level[s] = 0;
        q.push(s);
        while (!q.empty()) {
            int v = q.front(); q.pop();
            for (const auto &e : g[v]) {
                if (e.cap > 0 && level[e.to] < 0) {
                    level[e.to] = level[v] + 1;
                    q.push(e.to);
                }
            }
        }
        return level[t] >= 0;
    }
    int dfs(int v, int t, int f) {
        if (v == t) return f;
        for (int &i = it[v]; i < (int)g[v].size(); i++) {
            Edge &e = g[v][i];
            if (e.cap <= 0 || level[e.to] != level[v] + 1) continue;
            int ret = dfs(e.to, t, min(f, e.cap));
            if (ret > 0) {
                e.cap -= ret;
                g[e.to][e.rev].cap += ret;
                return ret;
            }
        }
        return 0;
    }
    int maxflow(int s, int t) {
        int flow = 0;
        while (bfs(s, t)) {
            fill(it.begin(), it.end(), 0);
            while (true) {
                int f = dfs(s, t, numeric_limits<int>::max());
                if (!f) break;
                flow += f;
            }
        }
        return flow;
    }
};

// Wall-clock stopwatch in milliseconds (steady clock: immune to NTP jumps).
struct Timer {
    chrono::steady_clock::time_point st;
    Timer() { st = chrono::steady_clock::now(); }
    double ms() const {
        auto now = chrono::steady_clock::now();
        return chrono::duration<double, milli>(now - st).count();
    }
};

int main() {
    ios::sync_with_stdio(false);
    cin.tie(nullptr);

    // Build HB_ID mapping: enumerate all (h,b) with h+b <= LEN.
    for (int h = 0; h <= LEN; h++)
        for (int b = 0; b <= LEN; b++) HB_ID[h][b] = -1;
    int kk = 0;
    for (int h = 0; h <= LEN; h++)
        for (int b = 0; b <= LEN - h; b++) HB_ID[h][b] = kk++;
    ID_50 = HB_ID[5][0];

    // Generate all 30240 strings (5 pairwise-distinct digits).
    ALL.reserve(NALL);
    ALL_STR.reserve(NALL);
    for (int a = 0; a < DIG; a++)
        for (int b = 0; b < DIG; b++) if (b != a)
            for (int c = 0; c < DIG; c++) if (c != a && c != b)
                for (int d = 0; d < DIG; d++) if (d != a && d != b && d != c)
                    for (int e = 0; e < DIG; e++) if (e != a && e != b && e != c && e != d) {
                        Code x;
                        x.d = {(uint8_t)a, (uint8_t)b, (uint8_t)c, (uint8_t)d, (uint8_t)e};
                        x.mask = (uint16_t)((1u << a) | (1u << b) | (1u << c) | (1u << d) | (1u << e));
                        ALL.push_back(x);
                        string s;
                        for (uint8_t dg : x.d) s += char('0' + dg);
                        ALL_STR.push_back(s);
                    }

    // Candidate state.
    vector<char> alive(NALL, 1);   // still possible as an unknown answer
    vector<char> asked(NALL, 0);   // already sent as a query
    vector<double> w(NALL, 1.0);   // IPF weight ≈ existence probability
    vector<int> alive_list(NALL);  // dense index list over alive candidates
    iota(alive_list.begin(), alive_list.end(), 0);
    int found = 0;                 // games solved so far (out of 30)

    // History.
    vector<int> queries;             // queried code index
    vector<array<int, K>> cnt;       // residual counts for remaining unknowns
    vector<vector<uint8_t>> cats;    // cats[qid][i] = HB(query[qid], i)
    vector<uint32_t> zeroMask;       // bitmask of categories with zero remaining
    deque<int> forced;               // candidates proven to exist
    vector<char> in_forced(NALL, 0);

    Timer timer;

    // Rescale weights so they sum to the number of remaining unknowns.
    auto normalize = [&](int n_rem) {
        if (alive_list.empty()) return;
        double sum = 0.0;
        for (int i : alive_list) sum += w[i];
        if (sum <= 0) {
            // Degenerate: fall back to a uniform distribution.
            double uni = (double)n_rem / (double)alive_list.size();
            for (int i : alive_list) w[i] = uni;
            return;
        }
        double sc = (double)n_rem / sum;
        for (int i : alive_list) w[i] *= sc;
    };

    // Iterative proportional fitting: repeatedly rescale each query's
    // category masses to match its residual histogram. `damp` < 1 softens
    // each multiplicative factor (pow) for stability.
    auto ipf = [&](int n_rem, int sweeps, double damp) {
        if (queries.empty() || alive_list.empty()) return;
        for (int it = 0; it < sweeps; it++) {
            for (int q = 0; q < (int)queries.size(); q++) {
                const auto &catq = cats[q];
                array<double, K> sum{}; // zero-initialized masses per category
                for (int i : alive_list) sum[catq[i]] += w[i];
                array<double, K> fac;
                for (int r = 0; r < K; r++) {
                    if (sum[r] > 0) {
                        fac[r] = (double)cnt[q][r] / sum[r];
                    } else {
                        // No alive candidates in this category.
                        // If judge says 0: keep factor 1 (no effect). Otherwise infeasible; set 0.
                        fac[r] = (cnt[q][r] == 0 ? 1.0 : 0.0);
                    }
                    if (damp != 1.0) fac[r] = pow(fac[r], damp);
                }
                for (int i : alive_list) w[i] *= fac[catq[i]];
            }
        }
        normalize(n_rem);
    };

    // Kill every alive candidate whose category (w.r.t. query qid) is in maskbits.
    auto prune_by_mask = [&](int qid, uint32_t maskbits) {
        if (maskbits == 0) return;
        const auto &catq = cats[qid];
        vector<int> nxt;
        nxt.reserve(alive_list.size());
        for (int i : alive_list) {
            if (!alive[i]) continue;
            if (maskbits & (1u << catq[i])) {
                alive[i] = 0;
                w[i] = 0.0;
            } else nxt.push_back(i);
        }
        alive_list.swap(nxt);
    };

    // If a bucket needs exactly its remaining count -> all are forced.
    auto extract_forced = [&]() {
        if (alive_list.empty()) return;
        // Only do this when we still have time.
        if (timer.ms() > 4800) return;
        array<vector<int>, K> buckets;
        for (int q = 0; q < (int)queries.size(); q++) {
            for (int r = 0; r < K; r++) buckets[r].clear();
            const auto &catq = cats[q];
            for (int i : alive_list) buckets[catq[i]].push_back(i);
            for (int r = 0; r < K; r++) {
                int need = cnt[q][r];
                if (need > 0 && (int)buckets[r].size() == need) {
                    for (int idx : buckets[r]) {
                        if (alive[idx] && !asked[idx] && !in_forced[idx]) {
                            in_forced[idx] = 1;
                            forced.push_back(idx);
                        }
                    }
                }
            }
        }
    };

    // Pairwise feasibility pruning for 2 queries (qa,qb): discard HB-type (ra,rb) that can never appear.
    auto pair_prune = [&](int qa, int qb, int n_rem, int max_checks) {
        if (qa == qb || n_rem <= 0 || alive_list.empty()) return;
        if (timer.ms() > 4700) return;
        // cap[a][b] = #alive candidates of joint type (a,b).
        static int cap[K][K];
        for (int a = 0; a < K; a++)
            for (int b = 0; b < K; b++) cap[a][b] = 0;
        for (int i : alive_list) cap[cats[qa][i]][cats[qb][i]]++;
        vector<pair<int, int>> types;
        types.reserve(K * K);
        for (int a = 0; a < K; a++)
            for (int b = 0; b < K; b++)
                if (cap[a][b] > 0) types.push_back({a, b});
        if ((int)types.size() > max_checks) types.resize(max_checks);
        static uint8_t impossible[K][K];
        for (int a = 0; a < K; a++)
            for (int b = 0; b < K; b++) impossible[a][b] = 0;

        // Can a feasible remaining subset contain one element of type (ra,rb)?
        // Fix 1 unit on (ra,rb), then check the other n_rem-1 units can still
        // satisfy both residual histograms via a transportation max-flow.
        auto feasible_with_lb = [&](int ra, int rb) -> bool {
            if (cnt[qa][ra] <= 0 || cnt[qb][rb] <= 0) return false;
            if (cap[ra][rb] <= 0) return false;
            // Fix 1 flow on (ra,rb)
            int need = n_rem - 1;
            if (need < 0) return true;
            // Build flow network
            int S = 0;
            int L0 = 1;
            int R0 = 1 + K;
            int T = 1 + K + K;
            Dinic mf(T + 1);
            // Supplies (with one consumed at ra)
            for (int a = 0; a < K; a++) {
                int sup = cnt[qa][a] - (a == ra ? 1 : 0);
                if (sup > 0) mf.add_edge(S, L0 + a, sup);
            }
            // Demands (with one consumed at rb)
            for (int b = 0; b < K; b++) {
                int dem = cnt[qb][b] - (b == rb ? 1 : 0);
                if (dem > 0) mf.add_edge(R0 + b, T, dem);
            }
            // Inner edges (cap reduced by 1 on (ra,rb))
            for (int a = 0; a < K; a++) {
                for (int b = 0; b < K; b++) {
                    int c = cap[a][b] - ((a == ra && b == rb) ? 1 : 0);
                    if (c > 0) mf.add_edge(L0 + a, R0 + b, c);
                }
            }
            int f = mf.maxflow(S, T);
            return f == need;
        };

        for (auto [ra, rb] : types) {
            if (timer.ms() > 4700) break;
            if (!feasible_with_lb(ra, rb)) {
                impossible[ra][rb] = 1;
            }
        }
        // prune candidates in impossible types
        vector<int> nxt;
        nxt.reserve(alive_list.size());
        for (int idx : alive_list) {
            int ra = cats[qa][idx];
            int rb = cats[qb][idx];
            if (impossible[ra][rb]) {
                alive[idx] = 0;
                w[idx] = 0.0;
            } else nxt.push_back(idx);
        }
        alive_list.swap(nxt);
    };

    // Pick the next code to ask: a forced candidate if available, otherwise
    // the alive candidate with the highest IPF-estimated existence weight.
    auto choose_query = [&](int n_rem) -> int {
        // Clean forced queue
        while (!forced.empty()) {
            int x = forced.front();
            if (alive[x] && !asked[x]) return x;
            forced.pop_front();
        }
        // Spend time to sharpen estimation when we have enough budget.
        double t = timer.ms();
        int sweeps;
        double damp;
        if (t < 1000)      { sweeps = 60; damp = 0.85; }
        else if (t < 2500) { sweeps = 50; damp = 0.85; }
        else if (t < 4000) { sweeps = 30; damp = 0.90; }
        else               { sweeps = 10; damp = 0.95; }
        ipf(n_rem, sweeps, damp);
        int best = alive_list[0];
        double bw = w[best];
        for (int i : alive_list) {
            if (w[i] > bw) { bw = w[i]; best = i; }
        }
        return best;
    };

    // Main interactive loop.
    while (true) {
        if (found >= 30) break;
        if (alive_list.empty()) {
            // Should not happen; fallback
            break;
        }
        int n_rem = 30 - found;
        int qidx = choose_query(n_rem);

        // Ask
        cout << ALL_STR[qidx] << "\n";
        cout.flush(); // required for interactive judging

        // Read 30 pairs (must read all) and build histogram
        array<int, K> hist{};
        hist.fill(0);
        bool bad = false;
        int H0 = -2, B0 = -2;
        for (int i = 0; i < 30; i++) {
            int h, b;
            if (!(cin >> h >> b)) return 0; // EOF
            if (i == 0) { H0 = h; B0 = b; }
            if (h == -1 && b == -1) bad = true;
            if (h >= 0 && h <= 5 && b >= 0 && b <= 5 - h) {
                hist[HB_ID[h][b]]++;
            }
        }
        if (bad) {
            // Already judged wrong
            return 0;
        }
        if (H0 == 5 && B0 == 0) {
            // Solved
            return 0;
        }

        int c50 = hist[ID_50];
        bool new_found = (c50 > found);

        // Store query info
        queries.push_back(qidx);
        array<int, K> resid = hist;
        resid[ID_50] -= c50; // remove already-found ones; remaining unknowns never contribute (5,0)
        cnt.push_back(resid);

        // Prepare cats for this query
        cats.emplace_back(NALL);
        int qid = (int)queries.size() - 1;
        for (int i = 0; i < NALL; i++) cats[qid][i] = hb_id(qidx, i);

        asked[qidx] = 1;

        // NOTE(review): reconstructed bookkeeping — any game solved by this
        // query has answer == qidx, so past residual histograms lose `delta`
        // units in qidx's category. Verify against the judge's protocol.
        if (new_found) {
            int delta = c50 - found;
            for (int j = 0; j < qid; j++) cnt[j][cats[j][qidx]] -= delta;
            found = c50;
        }

        // Categories with zero residual count -> prune
        uint32_t z = 0;
        for (int r = 0; r < K; r++)
            if (cnt[qid][r] == 0) z |= (1u << r);
        zeroMask.push_back(z);
        prune_by_mask(qid, z);

        // Pairwise flow-based pruning using the newest query.
        if ((int)queries.size() >= 2) {
            int last = (int)queries.size() - 1;
            int nrem2 = 30 - found;
            // A few pairs involving the newest query
            const int checks = (timer.ms() < 1500 ? 260 : (timer.ms() < 3000 ? 180 : 80));
            pair_prune(0, last, nrem2, checks);
            if (last >= 1) pair_prune(1, last, nrem2, checks);
            if (last >= 2) pair_prune(last - 1, last, nrem2, checks);
        }

        // Try to extract forced elements
        extract_forced();
    }
    return 0;
}