題意
給出一個序列，求出有多少區間滿足 $A[l] \oplus A[l+1] \oplus \dots \oplus A[r] = A[l] + A[l+1] + \dots + A[r]$
Sol
一個區間能滿足要求一定是所有bit上最多只有一個1
這玩意兒顯然有單調性，two pointers掃一遍
#include<cstdio>
#define LL long long
using namespace std;
const int MAXN = 2e5 + 10;
// Fast input: parses an optionally-signed decimal integer from stdin.
// Skips any leading non-digit characters; a '-' seen immediately before
// the digits makes the result negative. Returns the parsed value.
// (The original had its char literals mangled into backticks, e.g. `0`,
// which does not compile; restored to proper '0'/'9'/'-' literals.)
inline int read() {
    char c = getchar(); int x = 0, f = 1;
    // Skip to the first digit, remembering a preceding minus sign.
    while(c < '0' || c > '9') {if(c == '-') f = -1; c = getchar();}
    // Accumulate digits.
    while(c >= '0' && c <= '9') x = x * 10 + c - '0', c = getchar();
    return x * f;
}
int N, a[MAXN];
// Counts subarrays [l, r] whose XOR equals their sum.
// XOR equals sum exactly when no bit position carries, i.e. the window's
// elements have pairwise-disjoint set bits; that property is monotone in
// the window, so a two-pointer sweep answers the problem in O(N).
// Note: the original declared `main()` with no return type, which is
// ill-formed in C++ (implicit int is not allowed) — fixed to `int main()`.
int main() {
    N = read();
    for(int i = 1; i <= N; i++) a[i] = read();
    LL l = 1, sxor = 0, sum = 0, ans = 0;  // current window is [l, i]
    for(int i = 1; i <= N; i++) {
        sum += a[i]; sxor ^= a[i];
        // Shrink from the left until XOR == sum again; the single-element
        // window l == i always satisfies it, so the loop terminates.
        while(sxor != sum && (l < i))
            sum -= a[l], sxor ^= a[l++];
        // Every sub-window [l', i] with l <= l' <= i is also valid.
        ans += i - l + 1;
    }
    printf("%lld", ans);
    return 0;
}