Heavy Path Decomposition

Introduction

The heavy child of a node is the child whose subtree is largest; if several children tie for the largest subtree, any one of them is chosen. A node with no children has no heavy child. All remaining children are light children.

The edge from a node to its heavy child is a heavy edge; the edges to its light children are light edges.

Heavy edges joined end to end form a heavy chain. In particular, a node incident to no heavy edge counts as a heavy chain by itself.

The whole tree is thus partitioned into heavy chains: every node belongs to exactly one chain, while some edges (the light edges) lie on no chain.

Nodes on the same heavy chain have consecutive DFS order (dfs2 always descends into the heavy child first).

Nodes in the same subtree also have consecutive DFS order, so the subtree of u maps to the single interval [dfn[u], dfn[u] + siz[u] - 1] (a small sketch below shows how to use this).

From the root to any node there are at most $\log n$ heavy chains, equivalently at most $\log n$ light edges: each time a light edge is descended, the subtree size at least halves.

Consecutive DFS order turns node-weight problems on the tree into interval problems on a sequence; an edge weight must be attached to the deeper of its two endpoints so that it becomes a node weight.
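
Because a subtree occupies one contiguous block of the DFS order, subtree operations need no chain jumping at all. Below is a minimal sketch that reuses the dfn, siz and st names from the point-weight template later on this page; the two helper functions are illustrative and not part of the template:

// Subtree of u covers dfn[u] .. dfn[u] + siz[u] - 1 in DFS order,
// so a single segment-tree call handles the whole subtree.
void subtreeAdd(int u, ll x) // add x to every node weight in u's subtree
{
    hp.st.update(1, hp.dfn[u], hp.dfn[u] + hp.siz[u] - 1, x);
}
ll subtreeSum(int u)         // sum of node weights in u's subtree
{
    return hp.st.query(1, hp.dfn[u], hp.dfn[u] + hp.siz[u] - 1);
}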

Lecture link

Templates

Node weights

//W and revW: the original node weights and the node weights rearranged by DFS order
//Compared with an ordinary segment tree, the only change is the build function (it reads revW)
//The tree is rooted at node 1 by default
//dfs1 computes the heavy child, subtree size, depth and parent of each node
//dfs2 computes the chain top and the DFS order (dfn) of each node
//To reduce coupling with the segment tree module, the original weights are rearranged into revW by DFS order
//querySum: query the combined weight information of all nodes on the path u-v; the details depend on the segment tree
//update:   modify the weights of all nodes on the path u-v; the details depend on the segment tree
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;
const int N = 5e4 + 3;
ll W[N], revW[N];
struct edge
{
    int v, nxt;
};
struct SegmentTree
{
    int segL[N << 2], segR[N << 2];
    ll sum[N << 2], lazy[N << 2];
    void pushup(int id)
    {
        sum[id] = sum[id << 1] + sum[id << 1 | 1];
    }
    void pushdown(int id)
    {
        if (!lazy[id])
            return;
        lazy[id << 1] += lazy[id];
        lazy[id << 1 | 1] += lazy[id];
        sum[id << 1] += lazy[id] * (segR[id << 1] - segL[id << 1] + 1);
        sum[id << 1 | 1] += lazy[id] * (segR[id << 1 | 1] - segL[id << 1 | 1] + 1);
        lazy[id] = 0;
    }
    void build(int id, int l, int r)
    {
        segL[id] = l;
        segR[id] = r;
        if (l == r)
        {
            sum[id] = revW[l];
            return;
        }
        int mid = (l + r) >> 1;
        build(id << 1, l, mid);
        build(id << 1 | 1, mid + 1, r);
        pushup(id);
    }
    void update(int id, int L, int R, ll val)
    {
        if (L <= segL[id] && segR[id] <= R)
        {
            sum[id] += val * (segR[id] - segL[id] + 1);
            lazy[id] += val;
            return;
        }
        int mid = (segL[id] + segR[id]) >> 1;
        pushdown(id);
        if (L <= mid)
            update(id << 1, L, R, val);
        if (R > mid)
            update(id << 1 | 1, L, R, val);
        pushup(id);
    }
    ll query(int id, int L, int R)
    {
        if (L <= segL[id] && segR[id] <= R)
            return sum[id];
        int mid = (segL[id] + segR[id]) >> 1;
        pushdown(id);
        ll res = 0;
        if (L <= mid)
            res += query(id << 1, L, R);
        if (R > mid)
            res += query(id << 1 | 1, L, R);
        return res;
    }
    void init(int n)
    {
        memset(lazy, 0, sizeof(lazy[0]) * (n << 2));
        build(1, 1, n);
    }
};
struct heavyPathDecomposition
{
    int edgeTot, n, dfsOrder, head[N];
    edge e[N << 1];
    int son[N], siz[N], dep[N], fa[N];
    int top[N], dfn[N];
    SegmentTree st;
    void init(int nn)
    {
        n = nn;
        memset(head, 0, sizeof(int) * (n + 1));
        memset(son, -1, sizeof(int) * (n + 1));
        edgeTot = 0;
        dfsOrder = 0;
    }
    void init2()
    {
        dep[1] = 1;
        dfs1(1);
        dfs2(1, 1);
        for (int i = 1; i <= n; ++i)
            revW[dfn[i]] = W[i];
        st.init(n);
    }
    void addEdge(int u, int v)
    {
        e[++edgeTot] = {v, head[u]};
        head[u] = edgeTot;
    }
    void dfs1(int u)
    {
        siz[u] = 1;
        int v;
        for (int i = head[u]; i; i = e[i].nxt)
        {
            v = e[i].v;
            if (v == fa[u])
                continue;
            dep[v] = dep[u] + 1;
            fa[v] = u;
            dfs1(v);
            siz[u] += siz[v];
            if (son[u] == -1 || siz[v] > siz[son[u]])
                son[u] = v;
        }
    }
    void dfs2(int u, int Top)
    {
        top[u] = Top;
        dfn[u] = ++dfsOrder;
        if (son[u] == -1)
            return;
        dfs2(son[u], Top);
        for (int i = head[u]; i; i = e[i].nxt)
        {
            if (e[i].v != son[u] && e[i].v != fa[u])
                dfs2(e[i].v, e[i].v);
        }
    }
    ll querySum(int u, int v)
    {
        ll res = 0;
        while (top[u] != top[v])
        {
            if (dep[top[u]] < dep[top[v]])
                swap(u, v);
            res += st.query(1, dfn[top[u]], dfn[u]);
            u = fa[top[u]];
        }
        if (dfn[u] > dfn[v])
            swap(u, v);
        return res + st.query(1, dfn[u], dfn[v]);
    }
    void update(int u, int v, ll x)
    {
        while (top[u] != top[v])
        {
            if (dep[top[u]] < dep[top[v]])
                swap(u, v);
            st.update(1, dfn[top[u]], dfn[u], x);
            u = fa[top[u]];
        }
        if (dfn[u] > dfn[v])
            swap(u, v);
        st.update(1, dfn[u], dfn[v], x);
    }
} hp;
hp.init(n);
hp.addEdge(u,v);
hp.init2();
hp.update(u,v,x);
hp.querySum(u,v);
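
As a usage illustration, here is a minimal driver sketch for the point-weight template, assuming it sits in the same file as the template above; the input format (n and q, then n node weights, then n-1 edges, then operations "1 u v x" for path add and "2 u v" for path sum) is made up for the illustration:

int main()
{
    int n, q;
    scanf("%d%d", &n, &q);
    hp.init(n);
    for (int i = 1; i <= n; ++i)
        scanf("%lld", &W[i]);      // original node weights, consumed by init2
    for (int i = 1, u, v; i < n; ++i)
    {
        scanf("%d%d", &u, &v);
        hp.addEdge(u, v);          // undirected tree: add both directions
        hp.addEdge(v, u);
    }
    hp.init2();                    // dfs1 + dfs2 + segment tree build
    while (q--)
    {
        int op, u, v;
        ll x;
        scanf("%d%d%d", &op, &u, &v);
        if (op == 1)
        {
            scanf("%lld", &x);
            hp.update(u, v, x);    // add x to every node on the path u-v
        }
        else
            printf("%lld\n", hp.querySum(u, v)); // sum of node weights on u-v
    }
    return 0;
}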

Edge weights

//Attach each edge weight to the deeper of the edge's two endpoints, so it becomes a node weight
//dfsOrder starts at -1, because with node 1 as the root, the root carries no edge weight
//match[i] is the node that the i-th edge's weight is attached to
//There are only n-1 edges, so the segment tree covers n-1 positions
//On one heavy chain the edges between two nodes exclude the edge attached at the LCA, hence the +1; the range can then become empty (out of bounds), so a check is added
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;
const int N = 1e5 + 3;
ll revW[N];
struct edge
{
    int v, nxt, id;
    ll w;
};
struct SegmentTree
{
    int segL[N << 2], segR[N << 2];
    ll sum[N << 2];
    void pushup(int id)
    {
        sum[id] = sum[id << 1] + sum[id << 1 | 1];
    }
    void build(int id, int l, int r)
    {
        segL[id] = l;
        segR[id] = r;
        if (l == r)
        {
            sum[id] = revW[l];
            return;
        }
        int mid = (l + r) >> 1;
        build(id << 1, l, mid);
        build(id << 1 | 1, mid + 1, r);
        pushup(id);
    }
    void update(int id, int pos, ll val)
    {
        if (segL[id] == segR[id])
        {
            sum[id] += val;
            return;
        }
        if (pos <= ((segL[id] + segR[id]) >> 1))
            update(id << 1, pos, val);
        else
            update(id << 1 | 1, pos, val);
        pushup(id);
    }
    ll query(int id, int L, int R)
    {
        if (L <= segL[id] && segR[id] <= R)
            return sum[id];
        int mid = (segL[id] + segR[id]) >> 1;
        ll res = 0;
        if (L <= mid)
            res += query(id << 1, L, R);
        if (R > mid)
            res += query(id << 1 | 1, L, R);
        return res;
    }
};
struct heavyPathDecomposition
{
    int edgeTot, n, dfsOrder, head[N];
    edge e[N << 1];
    int son[N], siz[N], dep[N], fa[N];
    int top[N], dfn[N];
    int match[N];
    ll W[N];
    SegmentTree st;
    void init(int nn)
    {
        n = nn;
        memset(head, 0, sizeof(int) * (n + 1));
        memset(son, -1, sizeof(int) * (n + 1));
        edgeTot = 0;
        dfsOrder = -1;
    }
    void init2()
    {
        dep[1] = 1;
        dfs1(1);
        dfs2(1, 1);
        for (int i = 2; i <= n; ++i)
            revW[dfn[i]] = W[i];
        st.build(1, 1, n - 1);
    }
    void addEdge(int u, int v, int id, ll w)
    {
        e[++edgeTot] = {v, head[u],id,w};
        head[u] = edgeTot;
    }
    void dfs1(int u)
    {
        siz[u] = 1;
        int v;
        for (int i = head[u]; i; i = e[i].nxt)
        {
            v = e[i].v;
            if (v == fa[u])
                continue;
            dep[v] = dep[u] + 1;
            fa[v] = u;
            match[e[i].id] = v;
            W[v] = e[i].w;
            dfs1(v);
            siz[u] += siz[v];
            if (son[u] == -1 || siz[v] > siz[son[u]])
                son[u] = v;
        }
    }
    void dfs2(int u, int Top)
    {
        top[u] = Top;
        dfn[u] = ++dfsOrder;
        if (son[u] == -1)
            return;
        dfs2(son[u], Top);
        for (int i = head[u]; i; i = e[i].nxt)
        {
            if (e[i].v != son[u] && e[i].v != fa[u])
                dfs2(e[i].v, e[i].v);
        }
    }
    ll querySum(int u, int v)
    {
        ll res = 0;
        while (top[u] != top[v])
        {
            if (dep[top[u]] < dep[top[v]])
                swap(u, v);
            res += st.query(1, dfn[top[u]], dfn[u]);
            u = fa[top[u]];
        }
        if (dfn[u] > dfn[v])
            swap(u, v);
        if (u != v)
            res += st.query(1, dfn[u] + 1, dfn[v]);
        return res;
    }
    //update: add x to the weight of the i-th edge (a point update at the node it is attached to)
    void update(int i, ll x)
    {
        st.update(1, dfn[match[i]], x);
    }
} hp;
hp.init(n);
hp.addEdge(u,v,i,w);
hp.init2();
hp.update(i,x);
hp.querySum(u,v);
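
A similar driver sketch for the edge-weight template, again assuming it shares a file with the template above. Edges are assumed to arrive numbered 1..n-1 with a weight; operation "1 i x" adds x to edge i (st.update adds to the stored weight, so setting an edge to x would mean passing the difference from its current value), and "2 u v" asks for the sum of edge weights on the path u-v. The input format is made up for the illustration:

int main()
{
    int n, q;
    scanf("%d%d", &n, &q);
    hp.init(n);
    for (int i = 1, u, v; i < n; ++i)
    {
        ll w;
        scanf("%d%d%lld", &u, &v, &w);
        hp.addEdge(u, v, i, w);    // keep the edge index i on both directions
        hp.addEdge(v, u, i, w);
    }
    hp.init2();                    // attaches each edge weight to its deeper endpoint
    while (q--)
    {
        int op;
        scanf("%d", &op);
        if (op == 1)
        {
            int i;
            ll x;
            scanf("%d%lld", &i, &x);
            hp.update(i, x);       // add x to the weight of edge i
        }
        else
        {
            int u, v;
            scanf("%d%d", &u, &v);
            printf("%lld\n", hp.querySum(u, v)); // sum of edge weights on u-v
        }
    }
    return 0;
}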

LCA

#include <bits/stdc++.h>
using namespace std;
const int N = 5e4 + 3;
struct edge
{
    int v, nxt;
};
struct heavyPathDecomposition
{
    int edgeTot,head[N];
    edge e[N<<1];
    int son[N], siz[N], dep[N], fa[N];
    int top[N];
    void init(int n)
    {
        memset(head, 0, sizeof(int) * (n + 1));
        memset(son, -1, sizeof(int) * (n + 1));
        edgeTot = 0;
    }
    void init2()
    {
        dep[1] = 1;
        dfs1(1);
        dfs2(1,1);
    }
    void addEdge(int u, int v)
    {
        e[++edgeTot] = {v, head[u]};
        head[u] = edgeTot;
    }
    void dfs1(int u)
    {
        siz[u] = 1;
        int v;
        for (int i = head[u]; i; i = e[i].nxt)
        {
            v = e[i].v;
            if (v == fa[u])
                continue;
            dep[v] = dep[u] + 1;
            fa[v] = u;
            dfs1(v);
            siz[u] += siz[v];
            if (son[u] == -1 || siz[v] > siz[son[u]])
                son[u] = v;
        }
    }
    void dfs2(int u, int Top)
    {
        top[u] = Top;
        if (son[u] == -1)
            return;
        dfs2(son[u], Top);
        for (int i = head[u]; i; i = e[i].nxt)
        {
            if (e[i].v != son[u] && e[i].v != fa[u])
                dfs2(e[i].v, e[i].v);
        }
    }
    int lca(int u,int v)
    {
        while(top[u]!=top[v])
        {
            if(dep[top[u]]<dep[top[v]])
                swap(u,v);
            u=fa[top[u]];
        }
        return (dep[u]>dep[v]?v:u);
    }
}hp;
hp.init(n);
hp.addEdge(u,v);
hp.init2();
hp.lca(u,v);
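
Since dep[] is already available, the LCA template also gives tree distances directly; a small sketch, where distanceInEdges is an illustrative helper rather than part of the template:

// number of edges on the path u-v
int distanceInEdges(int u, int v)
{
    return hp.dep[u] + hp.dep[v] - 2 * hp.dep[hp.lca(u, v)];
}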

hdu3966 node weights, path addition, single-node query

poj2763 edge weights, set a single edge to x, path-sum query

poj3237 edge weights, set a single edge to x, negate all edges on a path, path-maximum query

loj10141 node weights, path assignment, query the number of color segments on a path

DarkBZOJ 1036 node weights, single-node modification, path-sum and path-maximum queries

spoj qtree edge weights, set a single edge to x, path-maximum query

loj1348 node weights, single-node modification, path sum