For a bitset named bit (a short usage example follows this list):
bit.size() returns the size (the number of bits)
bit.count() returns the number of bits set to 1
bit.any() returns whether at least one bit is 1
bit.none() returns whether no bit is 1
bit.set() sets every bit to 1
bit.set(p) sets bit p to 1 (bitset positions start at 0, with bit 0 being the least significant bit!)
bit.set(p, x) sets bit p to x
bit.reset() sets every bit to 0
bit.reset(p) sets bit p to 0
bit.flip() flips every bit
bit.flip(p) flips bit p
bit.to_ulong() returns the value converted to unsigned long; throws std::overflow_error if the value does not fit
bit.to_ullong() returns the value converted to unsigned long long; throws std::overflow_error if the value does not fit
bit.to_string() returns the value converted to a string
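
A minimal sketch of the operations above on a small bitset<8>; the initial contents and printed values are chosen purely for illustration:

#include <bitset>
#include <iostream>
using namespace std;

int main() {
    bitset<8> bit("00010110");        // built from a binary string; bit 0 is the rightmost character
    cout << bit.size() << "\n";       // 8  -- number of bits
    cout << bit.count() << "\n";      // 3  -- number of 1 bits
    cout << bit.any() << "\n";        // 1  -- at least one bit is 1
    cout << bit.none() << "\n";       // 0  -- there are 1 bits, so none() is false
    bit.set(0);                       // 00010111 -- bit 0 becomes 1
    bit.set(7, 1);                    // 10010111 -- bit 7 becomes 1
    bit.reset(1);                     // 10010101 -- bit 1 becomes 0
    bit.flip(2);                      // 10010001 -- bit 2 is inverted
    cout << bit.to_string() << "\n";  // "10010001"
    cout << bit.to_ulong() << "\n";   // 145; would throw std::overflow_error only if the value did not fit
    bit.set();                        // 11111111 -- every bit becomes 1
    bit.reset();                      // 00000000 -- every bit becomes 0
    bit.flip();                       // 11111111 -- every bit inverted
    return 0;
}

The problem statement is not included here, but judging from the code, the solution below counts, for every a[i], how many b[j] differ from it in exactly two of the low 30 bits: it flips each pair of bit positions in a[i] and looks the result up among the b values, using vis as a constant-time presence filter and cnt as a frequency map.
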
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <bitset>
#include <iostream>
#include <unordered_map>
#include <queue>
#include <vector>
using namespace std;
typedef long long ll;
const ll N = 3e5 + 99;
unordered_map<ll, ll> cnt;   // cnt[v] = number of times v occurs in b[]
ll a[N], b[N];
bitset<1 << 30> vis;         // vis[v] = 1 iff v occurs in b[]  (2^30 bits, about 128 MB)

void solve() {
    ll n, m;
    scanf("%lld%lld", &n, &m);
    for (int i = 1; i <= n; i++) {
        scanf("%lld", &a[i]);
    }
    for (int j = 1; j <= m; j++) {
        scanf("%lld", &b[j]);
        vis.set(b[j]);       // mark the value as present
        cnt[b[j]]++;         // and count how often it occurs
    }
    ll ans = 0;
    // For every a[i], enumerate the C(30, 2) ways to flip exactly two of its low 30 bits
    // and add up how many b[j] equal the flipped value.
    for (int j = 0; j < 30; j++) {
        for (int k = j + 1; k < 30; k++) {
            for (int i = 1; i <= n; i++) {
                bitset<30> t = a[i];   // keeps only the low 30 bits of a[i]
                t.flip(j);
                t.flip(k);
                ll dd = t.to_ulong();
                if (vis[dd])           // cheap presence test before touching the hash map
                    ans += cnt[dd];
            }
        }
    }
    printf("%lld\n", ans);
}

int main() {
    ll t = 1;                // single test case
    while (t--) solve();
    return 0;
}
Original post: https://www.cnblogs.com/Xiao-yan/p/14607863.html