PTIT.Nutriboost
9.7 Highest Exponent Factorial
9.8 Miller-Rabin
9.9 Mod Integer
9.10 Mod Inv
9.11 Mod Mul
9.12 Mod Pow
9.13 Number Theoretic Transform
9.14 Pollard Rho Factorize
9.15 Primes
9.16 Totient Sieve
9.17 Totient
10 Probability and Statistics
10.1 Continuous Distributions
10.1.1 Uniform distribution
10.1.2 Exponential distribution
10.1.3 Normal distribution
10.2 Discrete Distributions
10.2.1 Binomial distribution
10.2.2 First success distribution
10.2.3 Poisson distribution
10.3 Probability Theory
11 Strings
11.1 Hashing
11.2 Incremental Aho Corasick
11.3 KMP
11.4 Minimal String Rotation
11.5 Suffix Array
11.6 Suffix Automaton
11.7 Suffix Tree
11.8 Z Algorithm

1 Algorithms

1.1 Mo's Algorithm

        return (A.l/block_size != B.l/block_size) ?
               (A.l/block_size < B.l/block_size) : (A.r < B.r);
    });

    vector <int> res;
    res.resize((int)Q.size());

    int L = 1, R = 0;
    for(query q: Q){
        while (L > q.l) add(--L);
        while (R < q.r) add(++R);
        while (L < q.l) del(L++);
        while (R > q.r) del(R--);
        res[q.pos] = calc(1, R-L+1);
    }
    return res;
}

1.2 Mo's Algorithm on Trees

/*
Given a tree with N nodes and Q queries. Each node has an integer weight.
Each query gives two nodes u and v and asks how many different weights
appear on the path from u to v.

----------
Modify DFS:
----------
For each node u, maintain the start and the end DFS time. Let's call them
ST(u) and EN(u). Every node is written into a flattened (Euler) array at
positions ST(u) and EN(u), so each node occurs twice.
=> For each query, a node is counted only if its occurrence count in the
chosen range is one.

--------------
Query solving:
--------------
Let the query be (u, v). Assume that ST(u) <= ST(v). Denote P = LCA(u, v).
- If P == u, the path corresponds to the range [ST(u), ST(v)].
- Otherwise, use the range [EN(u), ST(v)] and handle P separately
  (it never occurs in that range).
*/

void update(int &L, int &R, int qL, int qR){
    while (L > qL) add(--L);
    while (R < qR) add(++R);
    while (L < qL) del(L++);
    while (R > qR) del(R--);
}

vector <int> MoQueries(int n, vector <query> Q){
    block_size = sqrt((int)nodes.size());
    sort(Q.begin(), Q.end(), [](const query &A, const query &B){
        return (ST[A.l]/block_size != ST[B.l]/block_size) ?
               (ST[A.l]/block_size < ST[B.l]/block_size) : (ST[A.r] < ST[B.r]);
    });

    vector <int> res;
    res.resize((int)Q.size());

    LCA lca;
    lca.initialize(n);

    int L = 1, R = 0;
    for(query q: Q){
        int u = q.l, v = q.r;
        if(ST[u] > ST[v]) swap(u, v); // assume that ST[u] <= ST[v]
        int parent = lca.get(u, v);

        if(parent == u){
            int qL = ST[u], qR = ST[v];
            update(L, R, qL, qR);
        }else{
            int qL = EN[u], qR = ST[v];
            update(L, R, qL, qR);
            if(cnt_val[a[parent]] == 0)
                res[q.pos] += 1;
        }

        res[q.pos] += cur_ans;
    }
    return res;
}
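A possible add()/del() pair for the distinct-weight query above. This is a sketch only: cnt_node[], euler[], N and MAXVAL are assumed helpers and are not part of the template; a[u], cnt_val[] and cur_ans are the names used in the code above.

// Each node appears twice in the Euler array; it is on the path iff it is
// currently present an odd number of times in the window.
int cnt_node[N];      // occurrences of each node in the current window
int cnt_val[MAXVAL];  // how many path nodes carry each weight
int cur_ans = 0;      // distinct weights on the current path

void toggle(int u) {
    if (++cnt_node[u] == 1) {                 // first occurrence: u enters the path
        if (cnt_val[a[u]]++ == 0) cur_ans++;
    } else {                                  // second occurrence: u cancels out
        cnt_node[u] = 0;
        if (--cnt_val[a[u]] == 0) cur_ans--;
    }
}
void add(int pos) { toggle(euler[pos]); }     // euler[pos] = node at Euler position pos
void del(int pos) { toggle(euler[pos]); }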
        return max(a, b); //merge left and right queries
    }
    inline void pull(int n) {
        t[n] = max(t[lc], t[rc]); //merge lower nodes of the tree to get the parent node
    }
    void build(int n, int b, int e) {
        if(b == e) {
            t[n] = 0;
            return;
        }
        int mid = (b + e) >> 1;
        build(lc, b, mid);
        build(rc, mid + 1, e);
        pull(n);
    }
    void upd(int n, int b, int e, int i, int j, int v) {
        push(n, b, e);
        if(j < b || e < i) return;
        if(i <= b && e <= j) {
            lazy[n] += v;
            push(n, b, e);
            return;
        }
        int mid = (b + e) >> 1;
        upd(lc, b, mid, i, j, v);
        upd(rc, mid + 1, e, i, j, v);
        pull(n);
    }
    int query(int n, int b, int e, int i, int j) {
        push(n, b, e);
        if(i > e || b > j) return -inf;
        if(i <= b && e <= j) return t[n];
        int mid = (b + e) >> 1;
        return combine(query(lc, b, mid, i, j), query(rc, mid + 1, e, i, j));
    }
} t;

vector<int> g[N];
int par[N][LG + 1], dep[N], sz[N];
void dfs(int u, int p = 0) {
    par[u][0] = p;
    dep[u] = dep[p] + 1;
    sz[u] = 1;
    for (int i = 1; i <= LG; i++) par[u][i] = par[par[u][i - 1]][i - 1];
    if (p) g[u].erase(find(g[u].begin(), g[u].end(), p));
    for (auto &v : g[u]) if (v != p) {
        dfs(v, u);
        sz[u] += sz[v];
        if(sz[v] > sz[g[u][0]]) swap(v, g[u][0]);
    }
}
int lca(int u, int v) {
    if (dep[u] < dep[v]) swap(u, v);
    for (int k = LG; k >= 0; k--) if (dep[par[u][k]] >= dep[v]) u = par[u][k];
    if (u == v) return u;
    for (int k = LG; k >= 0; k--) if (par[u][k] != par[v][k]) u = par[u][k], v = par[v][k];
    return par[u][0];
}
int kth(int u, int k) {
    assert(k >= 0);
    for (int i = 0; i <= LG; i++) if (k & (1 << i)) u = par[u][i];
    return u;
}
int T, head[N], st[N], en[N];
void dfs_hld(int u) {
    st[u] = ++T;
    for (auto v : g[u]) {
        head[v] = (v == g[u][0] ? head[u] : v);
        dfs_hld(v);
    }
    en[u] = T;
}

int n;

int query_path(int u, int v) {
    int ans = -inf;
    while(head[u] != head[v]) {
        if (dep[head[u]] < dep[head[v]]) swap(u, v);
        ans = max(ans, t.query(1, 1, n, st[head[u]], st[u]));
        u = par[head[u]][0];
    }
    if (dep[u] > dep[v]) swap(u, v);
    ans = max(ans, t.query(1, 1, n, st[u], st[v]));
    return ans;
}

void update_path(int u, int v, int val) {
    while(head[u] != head[v]) {
        if (dep[head[u]] < dep[head[v]]) swap(u, v);
        t.upd(1, 1, n, st[head[u]], st[u], val);
        u = par[head[u]][0];
    }
    if (dep[u] > dep[v]) swap(u, v);
    t.upd(1, 1, n, st[u], st[v], val);
}
//https://www.hackerrank.com/challenges/subtrees-and-paths/problem

2.8 Hash Table

/*
 * Micro hash table, can be used as a set.
 * Very efficient vs std::set
 */

const int MN = 1001;
struct ht {
    int _s[(MN + 10) >> 5];
    int len;
    void set(int id) {
        len++;
        _s[id >> 5] |= (1LL << (id & 31));
    }
    bool is_set(int id) {
        return _s[id >> 5] & (1LL << (id & 31));
    }
};

2.9 Li Chao Tree

// LiChao SegTree
// Copied from https://judge.yosupo.jp/submission/60250
//
// Tested:
// - https://judge.yosupo.jp/problem/segment_add_get_min
// - https://judge.yosupo.jp/problem/line_add_get_min
// - (convex hull trick) https://oj.vnoi.info/problem/vmpizza
// - https://oj.vnoi.info/problem/vomario
using ll = long long;
const ll inf = 2e18;

struct Line {
    ll m, c;
    ll eval(ll x) {
        return m * x + c;
    }
};
struct node {
    Line line;
    node* left = nullptr;
    node* right = nullptr;
    node(Line line) : line(line) {}
    void add_segment(Line nw, int l, int r, int L, int R) {
        if (l > r || r < L || l > R) return;
        int m = (l + 1 == r ? l : (l + r) / 2);
        Node *x = c[i], *y = b == 2 ? x : x->c[h], *z = b ? y : x;
        if ((y->p = p)) p->c[up()] = y;
        c[i] = z->c[i ^ 1];
        if (b < 2) {
            x->c[h] = y->c[h ^ 1];
            y->c[h ^ 1] = x;
        }
        z->c[i ^ 1] = this;
        fix(); x->fix(); y->fix();
        if (p) p->fix();
        swap(pp, y->pp);
    }
    void splay() { /// Splay this up to the root. Always finishes without flip set.
        for (pushFlip(); p; ) {
            if (p->p) p->p->pushFlip();
            p->pushFlip(); pushFlip();
            int c1 = up(), c2 = p->up();
            if (c2 == -1) p->rot(c1, 2);
            else p->p->rot(c2, c1 != c2);
        }
    }
    Node* first() { /// Return the min element of the subtree rooted at this, splayed to the top.
        pushFlip();
        return c[0] ? c[0]->first() : (splay(), this);
    }
};

struct LinkCut {
    vector<Node> node;
    LinkCut(int N) : node(N) {}

    void link(int u, int v) { // add an edge (u, v)
        assert(!connected(u, v));
        makeRoot(&node[u]);
        node[u].pp = &node[v];
    }
    void cut(int u, int v) { // remove an edge (u, v)
        Node *x = &node[u], *top = &node[v];
        makeRoot(top); x->splay();
        assert(top == (x->pp ?: x->c[0]));
        if (x->pp) x->pp = 0;
        else {
            x->c[0] = top->p = 0;
            x->fix();
        }
    }
    bool connected(int u, int v) { // are u, v in the same tree?
        Node* nu = access(&node[u])->first();
        return nu == access(&node[v])->first();
    }
    void makeRoot(Node* u) { /// Move u to root of represented tree.
        access(u);
        u->splay();
        if(u->c[0]) {
            u->c[0]->p = 0;
            u->c[0]->flip ^= 1;
            u->c[0]->pp = u;
            u->c[0] = 0;
            u->fix();
        }
    }
    Node* access(Node* u) { /// Move u to root aux tree. Return the root of the root aux tree.
        u->splay();
        while (Node* pp = u->pp) {
            pp->splay(); u->pp = 0;
            if (pp->c[1]) {
                pp->c[1]->p = 0; pp->c[1]->pp = pp; }
            pp->c[1] = u; pp->fix(); u = pp;
        }
        return u;
    }
};

2.12 Mo Queries

void add(int ind, int end) { ... } // add a[ind] (end = 0 or 1)
void del(int ind, int end) { ... } // remove a[ind]
int calc() { ... } // compute current answer

vi mo(vector<pii> Q) {
    int L = 0, R = 0, blk = 350; // ~N/sqrt(Q)
    vi s(sz(Q)), res = s;
#define K(x) pii(x.first/blk, x.second ^ -(x.first/blk & 1))
    iota(all(s), 0);
    sort(all(s), [&](int s, int t){ return K(Q[s]) < K(Q[t]); });
    for (int qi : s) {
        pii q = Q[qi];
        while (L > q.first) add(--L, 0);
        while (R < q.second) add(R++, 1);
        while (L < q.first) del(L++, 0);
        while (R > q.second) del(--R, 1);
        res[qi] = calc();
    }
    return res;
}

vi moTree(vector<array<int, 2>> Q, vector<vi>& ed, int root=0){
    int N = sz(ed), pos[2] = {}, blk = 350; // ~N/sqrt(Q)
    vi s(sz(Q)), res = s, I(N), L(N), R(N), in(N), par(N);
    add(0, 0), in[0] = 1;
    auto dfs = [&](int x, int p, int dep, auto& f) -> void {
        par[x] = p;
        L[x] = N;
        if (dep) I[x] = N++;
        for (int y : ed[x]) if (y != p) f(y, x, !dep, f);
        if (!dep) I[x] = N++;
        R[x] = N;
    };
    dfs(root, -1, 0, dfs);
#define K(x) pii(I[x[0]] / blk, I[x[1]] ^ -(I[x[0]] / blk & 1))
    iota(all(s), 0);
    sort(all(s), [&](int s, int t){ return K(Q[s]) < K(Q[t]); });
    for (int qi : s) rep(end,0,2) {
        int &a = pos[end], b = Q[qi][end], i = 0;
#define step(c) { if (in[c]) { del(a, end); in[a] = 0; } \
    else { add(c, end); in[c] = 1; } a = c; }
        while (!(L[b] <= L[a] && R[a] <= R[b]))
            I[i++] = b, b = par[b];
        while (a != b) step(par[a]);
        while (i--) step(I[i]);
        if (end) res[qi] = calc();
    }
    return res;
}

2.13 Persistent DSU

// PersistentDSU
//
// Notes:
// - this doesn't support delete edge operation, so isn't enough to
            n->l = pa.second;
            n->recalc();
            return {pa.first, n};
        } else {
            auto pa = split(n->r, k - cnt(n->l) - 1); // and just "k"
            n->r = pa.first;
            n->recalc();
            return {n, pa.second};
        }
    }

    Node* merge(Node* l, Node* r) {
        if (!l) return r;
        if (!r) return l;
        if (l->y > r->y) {
            l->r = merge(l->r, r);
            l->recalc();
            return l;
        } else {
            r->l = merge(l, r->l);
            r->recalc();
            return r;
        }
    }

    Node* ins(Node* t, Node* n, int pos) {
        auto pa = split(t, pos);
        return merge(merge(pa.first, n), pa.second);
    }

    // Example application: move the range [l, r) to index k
    void move(Node*& t, int l, int r, int k) {
        Node *a, *b, *c;
        tie(a,b) = split(t, l); tie(b,c) = split(b, r - l);
        if (k <= l) t = merge(ins(a, b, k), c);
        else t = merge(a, ins(c, b, k - r));
    }

2.17 Segment Tree

#include <bits/stdc++.h>
using namespace std;

const int N = 1e5 + 10;

int node[4*N];

void modify(int seg, int l, int r, int p, int val){
    if(l == r){
        node[seg] += val;
        return;
    }
    int mid = (l + r)/2;
    if(p <= mid){
        modify(2*seg + 1, l, mid, p, val);
    }else{
        modify(2*seg + 2, mid + 1, r, p, val);
    }
    node[seg] = node[2*seg + 1] + node[2*seg + 2];
}
int sum(int seg, int l, int r, int a, int b){
    if(l > b || r < a) return 0;
    if(l >= a && r <= b) return node[seg];
    int mid = (l + r)/2;
    return sum(2*seg + 1, l, mid, a, b) + sum(2*seg + 2, mid + 1, r, a, b);
}

2.18 Sparse Table

template <typename T, typename func = function<T(const T, const T)>>
struct SparseTable {
    func calc;
    int n;
    vector<vector<T>> ans;

    SparseTable() {}

    SparseTable(const vector<T>& a, const func& f) : n(a.size()), calc(f) {
        int last = trunc(log2(n)) + 1;
        ans.resize(n);
        for (int i = 0; i < n; i++){
            ans[i].resize(last);
        }
        for (int i = 0; i < n; i++){
            ans[i][0] = a[i];
        }
        for (int j = 1; j < last; j++){
            for (int i = 0; i <= n - (1 << j); i++){
                ans[i][j] = calc(ans[i][j - 1], ans[i + (1 << (j - 1))][j - 1]);
            }
        }
    }

    T query(int l, int r){
        assert(0 <= l && l <= r && r < n);
        int k = trunc(log2(r - l + 1));
        return calc(ans[l][k], ans[r - (1 << k) + 1][k]);
    }
};

2.19 Trie

const int MN = 26;     // size of alphabet
const int MS = 100010; // Number of states.

struct trie{
    struct node{
        int c;
        int a[MN];
    };

    node tree[MS];
    int nodes;

    void clear(){
        tree[nodes].c = 0;
        memset(tree[nodes].a, -1, sizeof tree[nodes].a);
        nodes++;
    }

    void init(){
        nodes = 0;
        clear();
    }

    int add(const string &s, bool query = 0){
        int cur_node = 0;
        for(int i = 0; i < s.size(); ++i){
            int id = gid(s[i]);
            if(tree[cur_node].a[id] == -1){
                if(query) return 0;
                tree[cur_node].a[id] = nodes;
                clear();
            }
            cur_node = tree[cur_node].a[id];
        }
        if(!query) tree[cur_node].c++;
        return tree[cur_node].c;
    }
};
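A short usage sketch for the trie in 2.19. gid() is assumed to map characters to 0..MN-1 (e.g. c - 'a') and is not shown above; the object should live in global scope since tree[] is large.

// Sketch only.
trie T;

void example() {
    T.init();
    T.add("code");                // insert; returns how many times "code" was added
    T.add("coder");
    int hits = T.add("code", 1);  // query only: counter of the exact word "code", 0 if absent
}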
    void dfs(vector<vi>& C, int v, int par) {
        time[v] = T++;
        for (int y : C[v]) if (y != par) {
            path.push_back(v), ret.push_back(time[v]);
            dfs(C, y, v);
        }
    }

    int lca(int a, int b) {
        if (a == b) return a;
        tie(a, b) = minmax(time[a], time[b]);
        return path[rmq.query(a, b)];
    }
    //dist(a,b){return depth[a] + depth[b] - 2*depth[lca(a,b)];}
};

5.13 Manhattan MST

struct point {
    long long x, y;
};

// Returns a list of edges in the format (weight, u, v).
// Passing this list to Kruskal algorithm will give the Manhattan MST.
vector<tuple<long long, int, int>> manhattan_mst_edges(vector<point> ps) {
    vector<int> ids(ps.size());
    iota(ids.begin(), ids.end(), 0);
    vector<tuple<long long, int, int>> edges;
    for (int rot = 0; rot < 4; rot++) { // for every rotation
        sort(ids.begin(), ids.end(), [&](int i, int j){
            return (ps[i].x + ps[i].y) < (ps[j].x + ps[j].y);
        });
        map<int, int, greater<int>> active; // (xs, id)
        for (auto i : ids) {
            for (auto it = active.lower_bound(ps[i].x); it != active.end();
                 active.erase(it++)) {
                int j = it->second;
                if (ps[i].x - ps[i].y > ps[j].x - ps[j].y) break;
                assert(ps[i].x >= ps[j].x && ps[i].y >= ps[j].y);
                edges.push_back({(ps[i].x - ps[j].x) + (ps[i].y - ps[j].y), i, j});
            }
            active[ps[i].x] = i;
        }
        for (auto &p : ps) { // rotate
            if (rot & 1) p.x *= -1;
            else swap(p.x, p.y);
        }
    }
    return edges;
}

5.14 Math

Number of Spanning Trees
Create an N x N matrix mat, and for each edge a -> b in G, do mat[a][b]--,
mat[b][b]++ (and mat[b][a]--, mat[a][a]++ if G is undirected). Remove the
i-th row and column and take the determinant; this yields the number of
directed spanning trees rooted at i (if G is undirected, remove any
row/column).

Erdős–Gallai theorem
A simple graph with node degrees d1 ≥ ... ≥ dn exists iff d1 + ... + dn is
even and for every k = 1..n,
    ∑_{i=1}^{k} d_i ≤ k(k − 1) + ∑_{i=k+1}^{n} min(d_i, k).

5.15 Minimum Path Cover in DAG

Given a directed acyclic graph G = (V, E), we are to find the minimum number
of vertex-disjoint paths to cover each vertex in V.

We can construct a bipartite graph G' = (V_out ∪ V_in, E') from G, where:
    V_out = {v ∈ V : v has positive out-degree}
    V_in  = {v ∈ V : v has positive in-degree}
    E'    = {(u, v) ∈ V_out × V_in : (u, v) ∈ E}
Then it can be shown, via König's theorem, that G' has a matching of size m
if and only if there exist n − m vertex-disjoint paths that cover each vertex
in G, where n is the number of vertices in G and m is the maximum cardinality
bipartite matching in G'.

Therefore, the problem can be solved by finding the maximum cardinality
matching in G' instead.

NOTE: If the paths are not necessarily vertex-disjoint, find the transitive
closure and solve the problem for disjoint paths.

5.16 Planar Graph (Euler)

Euler's formula states that if a finite, connected, planar graph is drawn in
the plane without any edge intersections, and v is the number of vertices,
e is the number of edges and f is the number of faces (regions bounded by
edges, including the outer, infinitely large region), then
    f + v = e + 2
(e.g. for the cube graph: v = 8, e = 12, f = 6, and 6 + 8 = 12 + 2).

It can be extended to planar graphs that are not connected, with c connected
components:
    f + v = e + c + 1

5.17 Push Relabel

struct PushRelabel {
    struct Edge {
        int dest, back;
        ll f, c;
    };
    vector<vector<Edge>> g;
    vector<ll> ec;
    vector<Edge*> cur;
    vector<vi> hs; vi H;
    PushRelabel(int n) : g(n), ec(n), cur(n), hs(2*n), H(n) {}

    void addEdge(int s, int t, ll cap, ll rcap=0) {
        if (s == t) return;
        g[s].push_back({t, sz(g[t]), 0, cap});
        g[t].push_back({s, sz(g[s])-1, 0, rcap});
    }

    void addFlow(Edge& e, ll f) {
        Edge &back = g[e.dest][e.back];
        if (!ec[e.dest] && f)
            hs[H[e.dest]].push_back(e.dest);
        e.f += f; e.c -= f; ec[e.dest] += f;
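A minimal sketch of the spanning-tree count described in 5.14 (undirected case), using plain Gaussian elimination over doubles. countSpanningTrees is an illustrative name; for large answers an exact (modular or fraction-free) determinant should replace the doubles.

double countSpanningTrees(int n, vector<pair<int,int>>& edges) {
    vector<vector<double>> mat(n, vector<double>(n, 0));
    for (auto [a, b] : edges) {          // Laplacian: degree on the diagonal, -1 per edge
        mat[a][b] -= 1; mat[b][a] -= 1;
        mat[a][a] += 1; mat[b][b] += 1;
    }
    // remove row/column 0 and take the determinant of the (n-1)x(n-1) minor
    double det = 1;
    for (int i = 1; i < n; i++) {
        int piv = i;
        for (int j = i; j < n; j++)
            if (fabs(mat[j][i]) > fabs(mat[piv][i])) piv = j;
        if (fabs(mat[piv][i]) < 1e-12) return 0;   // disconnected graph
        if (piv != i) { swap(mat[piv], mat[i]); det = -det; }
        det *= mat[i][i];
        for (int j = i + 1; j < n; j++) {
            double f = mat[j][i] / mat[i][i];
            for (int k = i; k < n; k++) mat[j][k] -= f * mat[i][k];
        }
    }
    return det;   // round to the nearest integer for the tree count
}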
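A sketch of the reduction in 5.15: the answer is n minus a maximum matching in G'. Kuhn's algorithm is used only for brevity; PathCover and its members are illustrative names, and any max-matching or max-flow routine works.

struct PathCover {
    int n; vector<vector<int>> adj;       // adj[u] contains v for every DAG edge u -> v
    vector<int> matchL, matchR; vector<bool> used;
    PathCover(int n) : n(n), adj(n) {}
    void addEdge(int u, int v) { adj[u].push_back(v); }
    bool aug(int u) {                     // try to find an augmenting path from u
        for (int v : adj[u]) if (!used[v]) {
            used[v] = true;
            if (matchR[v] == -1 || aug(matchR[v])) {
                matchL[u] = v; matchR[v] = u; return true;
            }
        }
        return false;
    }
    int minPathCover() {                  // minimum number of vertex-disjoint covering paths
        matchL.assign(n, -1); matchR.assign(n, -1);
        int matching = 0;
        for (int u = 0; u < n; u++) {
            used.assign(n, false);
            if (aug(u)) matching++;
        }
        return n - matching;
    }
};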
5.20 Topological Sort

vi topoSort(const vector<vi>& gr) {
    vi indeg(sz(gr)), ret;
    for (auto& li : gr) for (int x : li) indeg[x]++;
    queue<int> q; // use priority_queue for lexic. largest ans.
    rep(i,0,sz(gr)) if (indeg[i] == 0) q.push(i);
    while (!q.empty()) {
        int i = q.front(); // top() for priority queue
        ret.push_back(i);
        q.pop();
        for (int x : gr[i])
            if (--indeg[x] == 0) q.push(x);
    }
    return ret;
}

5.21 Virtual Tree

/*
Used to solve problems on a given set of vertices
https://www.hackerrank.com/contests/hourrank-15/challenges/kittys-calculations-on-a-tree
*/

const int MOD = 1e9 + 7;
const int N = 2e5 + 5;
const int K = 18;

vector <int> adj[N];
int st[N], en[N], dep[N];
int up[K][N];
int timer = 0;
stack <int> stk;

// LCA
void dfs(int u, int p) {
    st[u] = ++timer;
    for (int v : adj[u]) {
        if (v == p) continue;
        dep[v] = dep[u] + 1;
        up[0][v] = u;
        for (int i = 1; i < K; ++i)
            up[i][v] = up[i - 1][up[i - 1][v]];
        dfs(v, u);
    }
    en[u] = timer;
}

int lca(int u, int v) {
    if (dep[u] != dep[v]) {
        if (dep[u] < dep[v]) swap(u, v);
        int d = dep[u] - dep[v];
        for (int i = K - 1; i >= 0; --i)
            if (d & (1 << i))
                u = up[i][u];
    }
    if (u == v) return u;
    for (int i = K - 1; i >= 0; --i) {
        if (up[i][u] != up[i][v]) {
            u = up[i][u];
            v = up[i][v];
        }
    }
    return up[0][u];
}

bool inside(int u, int v) {
    return st[u] <= st[v] && en[v] <= en[u];
}

vector <int> adj_vt[N];
int vt_root(vector <int> &ver) {
    sort(ver.begin(), ver.end(), [&] (const int& x, const int& y) {
        return st[x] < st[y];
    });
    int m = ver.size();
    for (int i = 0; i + 1 < m; ++i) {
        int new_ver = lca(ver[i], ver[i + 1]);
        ver.push_back(new_ver);
    }
    sort(ver.begin(), ver.end(), [&] (const int& x, const int& y) {
        return st[x] < st[y];
    });
    ver.resize(unique(ver.begin(), ver.end()) - ver.begin());

    stk.push(ver[0]);
    m = ver.size();
    for (int i = 1; i < m; ++i) {
        int u = ver[i];
        while (!stk.empty() && !inside(stk.top(), u)) // check if v is in u's subtree
            stk.pop();
        adj_vt[stk.top()].push_back(u);
        stk.push(u);
    }
    return ver[0];
}

int sz[N];
int tot; // total special vertices
ll ans;
void solve(int u, int p) {
    for (int v : adj_vt[u]) {
        if (v == p) continue;
        solve(v, u);
        sz[u] = (sz[u] + sz[v]) % MOD;
    }

    for (int v : adj_vt[u]) {
        if (v == p) continue;
        int w = dep[v] - dep[u];
        int mul = 1LL * sz[v] * (tot - sz[v] + MOD) % MOD;
        ans += 1LL * w * mul % MOD;
        ans %= MOD;
    }
}

signed main() {
    cin.tie(0) -> sync_with_stdio(0);

#ifdef JASPER
    freopen("in1", "r", stdin);
#endif

    int n, q;
    cin >> n >> q;

    for (int i = 1; i < n; ++i) {
        int u, v;
        cin >> u >> v;
        adj[u].push_back(v);
        adj[v].push_back(u);
    }

    dfs(1, 0);

    for (int _q = 1; _q <= q; ++_q) {
        int k;
        cin >> k;

        vector <int> ver;
        tot = 0;
        while (k--) {
            int x; cin >> x;
            sz[x] = x;
            tot = (tot + x) % MOD;
            ver.push_back(x);
        }

        int rt = vt_root(ver);
        solve(rt, 0);
    }
    fill(B.begin() + b.size(), B.begin() + sz, base{0, 0});
    fft(B, sz);
  }
  double ratio = 0.25 / sz;
  base r2(0, -1), r3(ratio, 0), r4(0, -ratio), r5(0, 1);
  for(int i = 0; i <= (sz >> 1); i++) {
    int j = (sz - i) & (sz - 1);
    base a1 = (A[i] + conj(A[j])), a2 = (A[i] - conj(A[j])) * r2;
    base b1 = (B[i] + conj(B[j])) * r3, b2 = (B[i] - conj(B[j])) * r4;
    if(i != j) {
      base c1 = (A[j] + conj(A[i])), c2 = (A[j] - conj(A[i])) * r2;
      base d1 = (B[j] + conj(B[i])) * r3, d2 = (B[j] - conj(B[i])) * r4;
      A[i] = c1 * d1 + c2 * d2 * r5;
      B[i] = c1 * d2 + c2 * d1;
    }
    A[j] = a1 * b1 + a2 * b2 * r5;
    B[j] = a1 * b2 + a2 * b1;
  }
  fft(A, sz); fft(B, sz);
  vector<int> res(need);
  for(int i = 0; i < need; i++) {
    long long aa = A[i].x + 0.5;
    long long bb = B[i].x + 0.5;
    long long cc = A[i].y + 0.5;
    res[i] = (aa + ((bb % mod) << 15) + ((cc % mod) << 30)) % mod;
  }
  return res;
}

vector<int> pow(vector<int>& a, int p) {
  vector<int> res;
  res.emplace_back(1);
  while(p) {
    if(p & 1) res = multiply(res, a);
    a = multiply(a, a, 1);
    p >>= 1;
  }
  return res;
}

int main() {
  int n, k; cin >> n >> k;
  vector<int> a(10, 0);
  while(k--) {
    int m; cin >> m;
    a[m] = 1;
  }
  vector<int> ans = pow(a, n / 2);
  int res = 0;
  for(auto x: ans) res = (res + 1LL * x * x % mod) % mod;
  cout << res << '\n';
  return 0;
}
//https://codeforces.com/contest/1096/problem/G

7.8 Others

Cycles  Let g_S(n) be the number of n-permutations whose cycle lengths all
belong to the set S. Then
    ∑_{n=0}^{∞} g_S(n) x^n / n! = exp( ∑_{n∈S} x^n / n )

Derangements  Permutations of a set such that none of the elements appear in
their original position.
    D(n) = (n−1)(D(n−1) + D(n−2)) = nD(n−1) + (−1)^n = n!/e rounded to the
    nearest integer

Burnside's lemma  Given a group G of symmetries and a set X, the number of
elements of X up to symmetry equals
    (1/|G|) ∑_{g∈G} |X^g|,
where X^g are the elements fixed by g (g.x = x).

If f(n) counts "configurations" (of some sort) of length n, we can ignore
rotational symmetry using G = Z_n to get
    g(n) = (1/n) ∑_{k=0}^{n−1} f(gcd(n, k)) = (1/n) ∑_{k|n} f(k) φ(n/k).

7.9 Permutation To Int

/**
 * Description: Permutation -> integer conversion. (Not order preserving.)
 * Integer -> permutation can use a lookup table.
 * Time: O(n)
 **/
int permToInt(vector<int>& v) {
    int use = 0, i = 0, r = 0;
    for(int x : v) r = r * ++i + __builtin_popcount(use & -(1<<x)),
                   use |= 1 << x;   // (note: minus, not ~!)
    return r;
}

7.10 Sigma Function

The Sigma Function is defined as:
    σ_x(n) = ∑_{d|n} d^x
When x = 0 it is called the divisor function, which counts the number of
positive divisors of n.

Now we are interested in finding
    ∑_{d|n} σ_0(d)
If n is written as a prime factorization
    n = ∏_{i=1}^{k} p_i^{e_i}
we can show that
    ∑_{d|n} σ_0(d) = ∏_{i=1}^{k} g(e_i + 1)
where g(x) is the sum of the first x positive integers:
    g(x) = x(x + 1)/2
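A small check of the identity in 7.10, assuming the factorization is given as (prime, exponent) pairs; sumSigma0 is an illustrative name.

long long sumSigma0(const vector<pair<long long,int>>& f) { // f = {(p_i, e_i)}
    long long res = 1;
    for (auto& [p, e] : f) {
        long long x = e + 1;
        res *= x * (x + 1) / 2;    // g(e_i + 1)
    }
    return res;
}
// Example: 12 = 2^2 * 3 has divisors 1,2,3,4,6,12 with σ0 = 1,2,2,3,4,6,
// so the sum is 18, and g(3) * g(2) = 6 * 3 = 18.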
8 Misc

8.1 Dates

//
// Time - Leap years
//
// A[i] has the accumulated number of days from months previous to i
const int A[13] = { 0, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334 };
// same as A, but for a leap year
const int B[13] = { 0, 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335 };
// returns number of leap years up to, and including, y
int leap_years(int y) { return y / 4 - y / 100 + y / 400; }
bool is_leap(int y) { return y % 400 == 0 || (y % 4 == 0 && y % 100 != 0); }
// number of days in blocks of years
const int p400 = 400*365 + leap_years(400);
const int p100 = 100*365 + leap_years(100);
const int p4 = 4*365 + 1;
const int p1 = 365;
int date_to_days(int d, int m, int y)
{
    return (y - 1) * 365 + leap_years(y - 1) + (is_leap(y) ? B[m] : A[m]) + d;
}
void days_to_date(int days, int &d, int &m, int &y)
{
    bool top100; // are we in the top 100 years of a 400 block?
    bool top4;   // are we in the top 4 years of a 100 block?
    bool top1;   // are we in the top year of a 4 block?

    y = 1;
    top100 = top4 = top1 = false;

    y += ((days-1) / p400) * 400;
    d = (days-1) % p400 + 1;

    if (d > p100*3) top100 = true, d -= 3*p100, y += 300;
    else y += ((d-1) / p100) * 100, d = (d-1) % p100 + 1;

    if (d > p4*24) top4 = true, d -= 24*p4, y += 24*4;
    else y += ((d-1) / p4) * 4, d = (d-1) % p4 + 1;

8.2 Debugging Tricks

• signal(SIGSEGV, [](int) { _Exit(0); }); converts segfaults into Wrong
  Answers. Similarly one can catch SIGABRT (assertion failures) and SIGFPE
  (zero divisions). _GLIBCXX_DEBUG failures generate SIGABRT (or SIGSEGV on
  gcc 5.4.0 apparently).

• feenableexcept(29); kills the program on NaNs (1), 0-divs (4), infinities
  (8) and denormals (16).

8.3 Interval Container

set<pii>::iterator addInterval(set<pii>& is, int L, int R) {
    if (L == R) return is.end();
    auto it = is.lower_bound({L, R}), before = it;
    while (it != is.end() && it->first <= R) {
        R = max(R, it->second);
        before = it = is.erase(it);
    }
    if (it != is.begin() && (--it)->second >= L) {
        L = min(L, it->first);
        R = max(R, it->second);
        is.erase(it);
    }
    return is.insert(before, {L,R});
}
void removeInterval(set<pii>& is, int L, int R) {
    if (L == R) return;
    auto it = addInterval(is, L, R);
    auto r2 = it->second;
    if (it->first == L) is.erase(it);
    else (int&)it->second = L;
    if (R != r2) is.emplace(R, r2);
}

• for (int x = m; x; ) { --x &= m; ... } loops over all subset masks of m
  (except m itself).

• c = x&-x, r = x+c; (((r^x) >> 2)/c) | r is the next number after x with the
  same number of bits set.

• rep(b,0,K) rep(i,0,(1 << K)) if (i & 1 << b) D[i] += D[i^(1 << b)];
  computes all sums of subsets.

8.4.2 Pragmas

• #pragma GCC optimize ("Ofast") will make GCC auto-vectorize loops and
  optimizes floating points better.

• #pragma GCC target ("avx2") can double performance of vectorized code, but
  causes crashes on old machines.

• #pragma GCC optimize ("trapv") kills the program on integer overflows (but
  is really slow).

8.5 Ternary Search

template<class F>
int ternSearch(int a, int b, F f) {
    assert(a <= b);
    while (b - a >= 5) {
        int mid = (a + b) / 2;
        if (f(mid) < f(mid+1)) a = mid; // (A)
        else b = mid+1;
    }
    rep(i,a+1,b+1) if (f(a) < f(i)) a = i; // (B)
    return a;
}
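A usage sketch for ternSearch: it returns an index maximizing a unimodal f on [a, b]; the data below is only an example.

vector<int> v = {1, 3, 7, 8, 6, 2};                     // unimodal sequence
int best = ternSearch(0, sz(v) - 1, [&](int i) { return v[i]; });
// best == 3, since v[3] = 8 is the maximum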
// Finds prime numbers between a and b, using basic primes up to sqrt(b)
// a must be greater than 1.
vector<long long> seg_sieve(long long a, long long b)
{
    long long ant = a;
    a = max(a, 3LL);
    vector<bool> pmap(b - a + 1);
    long long sqrt_b = sqrt(b);
    for (int i = 0; i < num_p; ++i) {
        long long p = primes[i];
        if (p > sqrt_b) break;
        long long j = (a + p - 1) / p;
        for (long long v = (j == 1) ? p + p : j * p; v <= b; v += p) {
            pmap[v - a] = true;
        }
    }
    vector<long long> ans;
    if (ant == 2) ans.push_back(2);
    int start = a % 2 ? 0 : 1;
    for (int i = start, I = b - a + 1; i < I; i += 2)
        if (pmap[i] == false)
            ans.push_back(a + i);
    return ans;
}

vector<pair<int, int>> factor(int n) {
    vector<pair<int, int>> ans;
    if (n == 0) return ans;
    for (int i = 0; primes[i] * primes[i] <= n; ++i) {
        if ((n % primes[i]) == 0) {
            int expo = 0;
            while ((n % primes[i]) == 0) {
                expo++;
                n /= primes[i];
            }

9.16 Totient Sieve

for (int i = 1; i < MN; i++)
    phi[i] = i;

for (int i = 2; i < MN; i++)
    if (!sieve[i]) // is prime
        for (int j = i; j < MN; j += i)
            phi[j] -= phi[j] / i;

9.17 Totient

long long totient(long long n) {
    if (n == 1) return 0;
    long long ans = n;
    for (int i = 0; primes[i] * primes[i] <= n; ++i) {
        if ((n % primes[i]) == 0) {
            while ((n % primes[i]) == 0) n /= primes[i];
            ans -= ans / primes[i];
        }
    }
    if (n > 1) {
        ans -= ans / n;
    }
    return ans;
}

10.1.2 Exponential distribution

The time between events in a Poisson process is Exp(λ), λ > 0.
    f(x) = λe^{−λx} if x ≥ 0, and 0 if x < 0
    µ = 1/λ, σ² = 1/λ²

10.1.3 Normal distribution

Most real random values with mean µ and variance σ² are well described by
N(µ, σ²), σ > 0.
    f(x) = 1/√(2πσ²) · e^{−(x−µ)²/(2σ²)}
If X1 ∼ N(µ1, σ1²) and X2 ∼ N(µ2, σ2²) then
    aX1 + bX2 + c ∼ N(aµ1 + bµ2 + c, a²σ1² + b²σ2²)

10.2 Discrete Distributions

10.2.1 Binomial distribution

The number of successes in n independent yes/no experiments, each of which
yields success with probability p, is Bin(n, p), n = 1, 2, ..., 0 ≤ p ≤ 1.
    p(k) = C(n, k) p^k (1 − p)^{n−k}
    µ = np, σ² = np(1 − p)
Bin(n, p) is approximately Po(np) for small p.
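A small worked instance of the binomial formulas above: for n = 4 and p = 1/2,
    p(2) = C(4, 2)(1/2)²(1/2)² = 6/16 = 0.375, µ = np = 2, σ² = np(1 − p) = 1.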
10.2.2 First success distribution

The number of trials needed to get the first success in independent yes/no
experiments, each of which yields success with probability p, is Fs(p),
0 ≤ p ≤ 1.
    p(k) = p(1 − p)^{k−1}, k = 1, 2, ...
    µ = 1/p, σ² = (1 − p)/p²

10.2.3 Poisson distribution

The number of events occurring in a fixed period of time t if these events
occur with a known average rate κ and independently of the time since the
last event is Po(λ), λ = tκ.
    p(k) = e^{−λ} λ^k / k!, k = 0, 1, 2, ...
    µ = λ, σ² = λ

10.3 Probability Theory

Let X be a discrete random variable with probability p_X(x) of assuming the
value x. It will then have an expected value (mean) µ = E(X) = ∑_x x p_X(x)
and variance σ² = V(X) = E(X²) − (E(X))² = ∑_x (x − E(X))² p_X(x), where σ is
the standard deviation. If X is instead continuous it will have a probability
density function f_X(x) and the sums above will instead be integrals with
p_X(x) replaced by f_X(x).

Expectation is linear:
    E(aX + bY) = aE(X) + bE(Y)
For independent X and Y,
    V(aX + bY) = a²V(X) + b²V(Y).

11 Strings

11.1 Hashing

struct H {
    typedef uint64_t ull;
    ull x; H(ull x=0) : x(x) {}
#define OP(O,A,B) H operator O(H o) { ull r = x; asm \
    (A "addq %%rdx, %0\n adcq $0,%0" : "+a"(r) : B); return r; }
    OP(+,,"d"(o.x)) OP(*,"mul %1\n", "r"(o.x) : "rdx")
    H operator-(H o) { return *this + ~o.x; }
    ull get() const { return x + !~x; }
    bool operator==(H o) const { return get() == o.get(); }
    bool operator<(H o) const { return get() < o.get(); }
};
static const H C = (ll)1e11+3; // (order ~ 3e9; random also ok)

struct HashInterval {
    vector<H> ha, pw;
    HashInterval(string& str) : ha(sz(str)+1), pw(ha) {
        pw[0] = 1;
        rep(i,0,sz(str))
            ha[i+1] = ha[i] * C + str[i],
            pw[i+1] = pw[i] * C;
    }
    H hashInterval(int a, int b) { // hash [a, b)
        return ha[b] - ha[a] * pw[b - a];
    }
};

vector<H> getHashes(string& str, int length) {
    if (sz(str) < length) return {};
    H h = 0, pw = 1;
    rep(i,0,length)
        h = h * C + str[i], pw = pw * C;
    vector<H> ret = {h};
    rep(i,length,sz(str)) {
        ret.push_back(h = h * C + str[i] - pw * str[i-length]);
    }
    return ret;
}

H hashString(string& s){H h{}; for(char c:s) h=h*C+c;return h;}

11.2 Incremental Aho Corasick

class IncrementalAhoCorasic {
    static const int Alphabets = 26;
    static const int AlphabetBase = 'a';

    struct Node {
        Node *fail;
        Node *next[Alphabets];
        int sum;
        Node() : fail(NULL), next{}, sum(0) { }
    };

    struct String {
        string str;
        int sign;
    };

public:
    //totalLen = sum of (len + 1)
    void init(int totalLen) {
        nodes.resize(totalLen);
        nNodes = 0;
        strings.clear();
        roots.clear();
        sizes.clear();
        que.resize(totalLen);
    }

    void insert(const string &str, int sign) {
        strings.push_back(String{ str, sign });
        roots.push_back(nodes.data() + nNodes);
        sizes.push_back(1);
        nNodes += (int)str.size() + 1;
        auto check = [&]() { return sizes.size() > 1 &&
            sizes.end()[-1] == sizes.end()[-2]; };
        if(!check())
            makePMA(strings.end() - 1, strings.end(), roots.back(), que);
        while(check()) {
            int m = sizes.back();
            roots.pop_back();
            sizes.pop_back();
            sizes.back() += m;
            if(!check())
                makePMA(strings.end() - m * 2, strings.end(), roots.back(), que);
        }
    }

    int match(const string &str) const {
        int res = 0;
        for(const Node *t : roots)
            res += matchPMA(t, str);
        return res;
    }

private:
    static void makePMA(vector<String>::const_iterator begin,
            vector<String>::const_iterator end, Node *nodes, vector<Node*> &que) {