// by GPT 5.2 Pro
//
// NOTE(review): the original paste had the #include target and every
// template-argument list stripped (e.g. "#include", "vector possible",
// "array d{}"), so the file could not compile.  All angle-bracketed parts
// below are reconstructed from how each object is used.
#include <algorithm>
#include <array>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <functional>
#include <iostream>
#include <numeric>
#include <random>
#include <string>
#include <unordered_set>
#include <utility>
#include <vector>
using namespace std;

// =============================================================
// 30-parallel mixed Hit&Blow solver (C++17)
//
// Interactive protocol (as used in main below): we print one 5-digit
// guess per line; the judge answers with M_TOTAL (hit, blow) pairs,
// sorted lexicographically, one per hidden secret.  (-1,-1) means the
// guess was judged invalid; a leading (5,0) means a secret was found.
// =============================================================
static constexpr int L = 5;        // code length
static constexpr int DIG = 10;     // digit alphabet 0..9
static constexpr int T = 21;       // number of (H,B) pairs with H+B<=5
static constexpr int M_TOTAL = 30; // number of hidden secrets

static int IDX[L + 1][L + 1]; // IDX[hit][blow] -> bucket index in [0..20]

// One candidate code: its digits, a bitmask of the digits it uses,
// and its printable string form.
struct Code {
    array<uint8_t, L> d{};
    uint16_t mask = 0;
    string s;
};

// Enumerate all 10*9*8*7*6 = 30240 codes with pairwise-distinct digits,
// in lexicographic order ("01234" is generated first).
static vector<Code> generate_all_codes() {
    vector<Code> all;
    all.reserve(30240);
    array<uint8_t, L> cur{};
    string buf(L, '0');
    function<void(int, uint16_t)> dfs = [&](int pos, uint16_t mask) {
        if (pos == L) {
            Code c;
            c.d = cur;
            c.mask = mask;
            c.s = buf;
            all.push_back(c);
            return;
        }
        for (int dig = 0; dig < DIG; dig++) {
            if (mask & (1u << dig)) continue;
            cur[pos] = (uint8_t)dig;
            buf[pos] = char('0' + dig);
            dfs(pos + 1, mask | (1u << dig));
        }
    };
    dfs(0, 0);
    return all;
}

// Answer-bucket index of querying b against secret a.
static inline uint8_t type_idx(const Code &a, const Code &b) {
    // a: secret, b: query
    int hit = 0;
    for (int i = 0; i < L; i++) hit += (a.d[i] == b.d[i]);
    // digits inside one code are distinct, so the number of shared digits
    // is the popcount of the mask intersection.
    int common = __builtin_popcount((unsigned)(a.mask & b.mask));
    int blow = common - hit;
    return (uint8_t)IDX[hit][blow];
}

struct Solver {
    const vector<Code> &codes;
    int N;
    vector<uint8_t> possible; // candidate can be an unfound secret
    vector<uint8_t> asked;    // already queried
    vector<uint8_t> found;    // confirmed to be one of the secrets
    int foundCount = 0;

    // Constraint gathered from one past query.
    struct Record {
        int qid;
        array<int, T> need;     // histogram over the *remaining* secrets
        array<int, T> aliveCnt; // how many possible candidates remain in each bucket
        array<vector<int>, T> members; // static list of all candidates by type for this query
    };
    vector<Record> recs;
    vector<vector<uint8_t>> typeCache; // typeCache[r][cid] = bucket of cid vs query r

    // planned list of remaining secrets if we solved uniquely in endgame
    vector<int> planned;

    explicit Solver(const vector<Code> &codes_)
        : codes(codes_), N((int)codes_.size()),
          possible(N, 1), asked(N, 0), found(N, 0) {}

    // Mark cid as impossible and keep every record's alive counters in sync.
    void kill_candidate(int cid) {
        if (!possible[cid]) return;
        possible[cid] = 0;
        for (int r = 0; r < (int)recs.size(); r++) {
            int t = typeCache[r][cid];
            recs[r].aliveCnt[t]--;
        }
    }

    // If need==0 then every candidate in that bucket cannot be a remaining secret.
    void eliminate_bucket(int r, int t) {
        for (int cid : recs[r].members[t]) {
            if (possible[cid] && !asked[cid]) kill_candidate(cid);
        }
    }

    // Register the answer histogram of a new query qid as a constraint record.
    void add_record(int qid, const array<int, T> &hist) {
        Record rec;
        rec.qid = qid;
        rec.need = hist;
        rec.aliveCnt.fill(0);
        for (int t = 0; t < T; t++) rec.members[t].clear();
        vector<uint8_t> row(N);
        for (int cid = 0; cid < N; cid++) {
            uint8_t t = type_idx(codes[cid], codes[qid]);
            row[cid] = t;
            rec.members[t].push_back(cid);
        }
        recs.push_back(std::move(rec));
        typeCache.push_back(std::move(row));
        int r = (int)recs.size() - 1;
        // compute aliveCnt for this new record
        for (int cid = 0; cid < N; cid++) {
            if (possible[cid] && !asked[cid]) recs[r].aliveCnt[typeCache[r][cid]]++;
        }
        // immediate propagation for zero buckets
        for (int t = 0; t < T; t++) {
            if (recs[r].need[t] == 0) eliminate_bucket(r, t);
        }
    }

    // Remove a confirmed secret's contribution from all previous records
    // (future histograms exclude it, since it now answers as (5,0)).
    void apply_found_secret(int sid) {
        for (int r = 0; r < (int)recs.size(); r++) {
            int t = typeCache[r][sid];
            if (recs[r].need[t] > 0) {
                recs[r].need[t]--;
                if (recs[r].need[t] == 0) eliminate_bucket(r, t);
            }
        }
    }

    // Consume the judge's sorted (hit, blow) pairs for query qid.
    void observe(int qid, const vector<pair<int, int>> &hb) {
        // hb is sorted lexicographically by judge
        if (hb.empty()) return;
        if (hb[0].first == -1 && hb[0].second == -1) {
            // judged as invalid -> exit immediately
            exit(0);
        }
        int cnt50 = 0;
        for (auto &p : hb)
            if (p.first == 5 && p.second == 0) cnt50++;
        int newFound = cnt50 - foundCount; // 0 or 1 (we never re-ask)
        asked[qid] = 1;
        kill_candidate(qid);
        if (newFound == 1) {
            found[qid] = 1;
            foundCount++;
            apply_found_secret(qid);
        }
        // build histogram for remaining secrets excluding (5,0)
        array<int, T> hist{};
        hist.fill(0);
        for (auto &p : hb) {
            if (p.first == 5 && p.second == 0) continue;
            hist[IDX[p.first][p.second]]++;
        }
        add_record(qid, hist);
    }

    // If for some record-bucket, need == aliveCnt > 0, all candidates in that
    // bucket must be secrets -> querying one is a guaranteed hit.
    int find_forced_query() {
        for (int r = 0; r < (int)recs.size(); r++) {
            for (int t = 0; t < T; t++) {
                int need = recs[r].need[t];
                int alive = recs[r].aliveCnt[t];
                if (need > 0 && alive > 0 && need == alive) {
                    for (int cid : recs[r].members[t]) {
                        if (possible[cid] && !asked[cid]) return cid;
                    }
                }
            }
        }
        return -1;
    }

    // Exhaustive endgame search: if exactly one multiset of remaining secrets
    // satisfies every record, store it in `planned` and return true.
    bool try_endgame_unique_solve() {
        planned.clear();
        int M = M_TOTAL - foundCount;
        // hard limits: keep it safe
        vector<int> cand;
        cand.reserve(N);
        for (int cid = 0; cid < N; cid++)
            if (possible[cid] && !asked[cid]) cand.push_back(cid);
        if (M > 8) return false;
        if ((int)cand.size() > 2000) return false;
        if (recs.empty()) return false;
        int R = (int)recs.size();

        // choose a "base" record with small branching (rough heuristic)
        int bestR = 0;
        double bestScore = 1e100;
        for (int r = 0; r < R; r++) {
            array<int, T> sz{};
            sz.fill(0);
            for (int cid : cand) sz[typeCache[r][cid]]++;
            double score = 1.0;
            for (int t = 0; t < T; t++) {
                int k = recs[r].need[t];
                if (k == 0) continue;
                // approximate branching as (bucket_size^k)
                score *= pow((double)max(1, sz[t]), (double)k);
                if (score > bestScore) break;
            }
            if (score < bestScore) {
                bestScore = score;
                bestR = r;
            }
        }

        // pre-build per-type candidate lists (indices in cand vector)
        vector<vector<int>> bucket(T);
        for (int i = 0; i < (int)cand.size(); i++) {
            int cid = cand[i];
            bucket[typeCache[bestR][cid]].push_back(i);
        }

        // copy needs (mutated during the DFS, restored on backtrack)
        vector<array<int, T>> need(R);
        for (int r = 0; r < R; r++) need[r] = recs[r].need;

        vector<uint8_t> used(cand.size(), 0);
        vector<int> cur;
        vector<vector<int>> solutions;

        // list of types to process in base record
        vector<int> baseTypes;
        for (int t = 0; t < T; t++)
            if (need[bestR][t] > 0) baseTypes.push_back(t);

        // Pick need[bestR][t] candidates from each base bucket; prune as soon
        // as any record's need goes negative; stop once 2 solutions exist
        // (no longer unique).
        function<void(int, int, int)> dfs_choose = [&](int ti, int pickLeft, int startIdx) {
            if ((int)solutions.size() >= 2) return;
            if (ti == (int)baseTypes.size()) {
                if ((int)cur.size() != M) return;
                // verify all needs are zero
                for (int r = 0; r < R; r++) {
                    for (int t = 0; t < T; t++)
                        if (need[r][t] != 0) return;
                }
                solutions.push_back(cur);
                return;
            }
            int t = baseTypes[ti];
            if (pickLeft == -1) pickLeft = need[bestR][t];
            if (pickLeft == 0) {
                dfs_choose(ti + 1, -1, 0);
                return;
            }
            auto &vec = bucket[t];
            for (int p = startIdx; p < (int)vec.size(); p++) {
                int idx = vec[p];
                if (used[idx]) continue;
                int cid = cand[idx];
                // apply
                used[idx] = 1;
                cur.push_back(cid);
                bool ok = true;
                for (int r = 0; r < R; r++) {
                    int tt = typeCache[r][cid];
                    need[r][tt]--;
                    if (need[r][tt] < 0) ok = false;
                }
                if (ok) dfs_choose(ti, pickLeft - 1, p + 1);
                // undo
                for (int r = 0; r < R; r++) {
                    int tt = typeCache[r][cid];
                    need[r][tt]++;
                }
                cur.pop_back();
                used[idx] = 0;
                if ((int)solutions.size() >= 2) return;
            }
        };
        dfs_choose(0, -1, 0);
        if ((int)solutions.size() == 1) {
            planned = std::move(solutions[0]);
            return true;
        }
        planned.clear();
        return false;
    }

    // Pick the next query id, or -1 when all secrets are found.
    int choose_next() {
        if (foundCount == M_TOTAL) return -1;
        if (!planned.empty()) {
            int q = planned.back();
            planned.pop_back();
            return q;
        }
        int forced = find_forced_query();
        if (forced != -1) return forced;
        int M = M_TOTAL - foundCount;
        // endgame: if we can uniquely determine remaining secrets, do it.
        if (M <= 8) {
            if (try_endgame_unique_solve() && !planned.empty()) {
                int q = planned.back();
                planned.pop_back();
                return q;
            }
        }
        // first query: fixed
        if (recs.empty()) return 0; // "01234" is generated first in lexicographic order

        // collect candidates
        vector<int> cand;
        cand.reserve(N);
        for (int cid = 0; cid < N; cid++)
            if (possible[cid] && !asked[cid]) cand.push_back(cid);
        if (cand.empty()) {
            // fallback (shouldn't happen)
            for (int cid = 0; cid < N; cid++)
                if (!asked[cid]) return cid;
            return 0;
        }

        // ---------------------------------------------------------
        // IPF (Iterative Proportional Fitting) to estimate marginal weights
        // (w[i] ~ probability that cand[i] is one of the M remaining secrets)
        // ---------------------------------------------------------
        int R = (int)recs.size();
        vector<double> w(cand.size(), (double)M / (double)cand.size());
        vector<double> sum(T), factor(T);
        const int IPF_IT = 12;
        for (int it = 0; it < IPF_IT; it++) {
            for (int r = 0; r < R; r++) {
                fill(sum.begin(), sum.end(), 0.0);
                for (int i = 0; i < (int)cand.size(); i++) {
                    int cid = cand[i];
                    sum[typeCache[r][cid]] += w[i];
                }
                for (int t = 0; t < T; t++) {
                    if (recs[r].need[t] == 0) {
                        factor[t] = 0.0;
                    } else if (sum[t] <= 0.0) {
                        factor[t] = 0.0;
                    } else {
                        factor[t] = (double)recs[r].need[t] / sum[t];
                    }
                }
                for (int i = 0; i < (int)cand.size(); i++) {
                    int cid = cand[i];
                    w[i] *= factor[typeCache[r][cid]];
                }
            }
            // normalize to sum M
            double tot = 0.0;
            for (double x : w) tot += x;
            if (tot <= 0.0) break;
            double scale = (double)M / tot;
            for (double &x : w) x *= scale;
        }

        // map weight by id (0 for non-candidates)
        vector<double> wById(N, 0.0);
        for (int i = 0; i < (int)cand.size(); i++) wById[cand[i]] = w[i];

        // best candidate by w
        int bestCid = cand[0];
        double bestW = w[0];
        for (int i = 1; i < (int)cand.size(); i++) {
            if (w[i] > bestW) {
                bestW = w[i];
                bestCid = cand[i];
            }
        }
        // If confidence is high enough, just query it (reduces misses)
        if (bestW >= 0.70) return bestCid;

        // ---------------------------------------------------------
        // Otherwise: choose a query that is informative.
        // We score queries by entropy of predicted bucket-weight distribution
        // + small term for "likely to create zero bucket" elimination
        // + small term for hit probability.
        // ---------------------------------------------------------
        int TOP = min(400, (int)cand.size());
        vector<int> order(cand.size());
        iota(order.begin(), order.end(), 0);
        nth_element(order.begin(), order.begin() + TOP, order.end(),
                    [&](int a, int b) { return w[a] > w[b]; });
        unordered_set<int> poolSet;
        poolSet.reserve(3000);
        // top weighted candidates
        vector<int> topIdx(order.begin(), order.begin() + TOP);
        sort(topIdx.begin(), topIdx.end(), [&](int a, int b) { return w[a] > w[b]; });
        for (int i = 0; i < TOP; i++) poolSet.insert(cand[topIdx[i]]);
        // random global queries to escape local optimum; the attempt cap keeps
        // this from spinning forever if almost every code was already asked.
        static std::mt19937_64 rng(1234567);
        uniform_int_distribution<int> uni(0, N - 1);
        const int RND = 1800;
        int attempts = 0;
        while ((int)poolSet.size() < TOP + RND && attempts < 50 * (TOP + RND)) {
            attempts++;
            int qid = uni(rng);
            if (asked[qid]) continue;
            poolSet.insert(qid);
        }
        vector<int> pool(poolSet.begin(), poolSet.end());

        vector<double> bWeight(T);
        vector<int> bCount(T);
        int bestQ = bestCid;
        double bestScore = -1e100;
        for (int qid : pool) {
            fill(bWeight.begin(), bWeight.end(), 0.0);
            fill(bCount.begin(), bCount.end(), 0);
            // bucket weights/counts over candidates
            for (int i = 0; i < (int)cand.size(); i++) {
                int sid = cand[i];
                int t = type_idx(codes[sid], codes[qid]);
                bWeight[t] += w[i];
                bCount[t] += 1;
            }
            // entropy
            double ent = 0.0;
            for (int t = 0; t < T; t++) {
                if (bWeight[t] <= 0.0) continue;
                double p = bWeight[t] / (double)M;
                if (p > 0.0) ent -= p * log(p);
            }
            // expected elimination via "some bucket becomes zero" (rough)
            double expElim = 0.0;
            for (int t = 0; t < T; t++) {
                if (bWeight[t] <= 0.0) continue;
                double p = bWeight[t] / (double)M;
                double probZero = pow(max(0.0, 1.0 - p), (double)M);
                expElim += (double)bCount[t] * probZero;
            }
            double hitP = wById[qid]; // 0 if not a candidate
            double score = ent + 0.05 * (expElim / (double)cand.size()) + 0.7 * hitP;
            if (score > bestScore) {
                bestScore = score;
                bestQ = qid;
            }
        }
        return bestQ;
    }
};

int main() {
    // Deliberately NOT disabling stdio sync: the original kept these off.
    // ios::sync_with_stdio(false);
    // cin.tie(nullptr);

    // build IDX table: enumerate (hit, blow) with hit+blow<=L in row-major order
    int id = 0;
    for (int h = 0; h <= L; h++) {
        for (int b = 0; b <= L - h; b++) {
            IDX[h][b] = id++;
        }
    }
    auto codes = generate_all_codes();
    Solver solver(codes);
    while (true) {
        int qid = solver.choose_next();
        if (qid < 0) break;
        cout << codes[qid].s << "\n";
        cout.flush(); // interactive judge: flush after every query
        vector<pair<int, int>> hb(M_TOTAL);
        for (int i = 0; i < M_TOTAL; i++) {
            if (!(cin >> hb[i].first >> hb[i].second)) return 0;
        }
        // The judge returns pairs sorted lexicographically. When (H0,B0)=(5,0),
        // all strings are already found -> terminate.
        if (hb[0].first == 5 && hb[0].second == 0) return 0;
        if (hb[0].first == -1 && hb[0].second == -1) return 0;
        solver.observe(qid, hb);
    }
    return 0;
}