HDU 6447

动态开点线段树 + dp。
题目转化为:在满足 \(x_i < x_j\) 且 \(y_i < y_j\) 的前提下,求能取得的最大权值和。
按第一维排序后,第二维用线段树加速 dp 转移;dp[i] 表示以第 i 个点结束时能取得的最大值。

#include<iostream>
#include<cstring>
#include<algorithm>
#include<cmath>
#include<cstdlib>
#include<climits>
#include<stack>
#include<vector>
#include<queue>
#include<set>
#include<bitset>
#include<map>
//#include<regex>
#include<cstdio>
#include <iomanip>
#pragma GCC optimize(2)
// Loop helper macros: up/dw iterate over half-open ranges, upd/dwd over
// inclusive ranges (the trailing 'd' = "include the endpoint").
#define up(i,a,b)  for(int i=a;i<b;i++)
#define dw(i,a,b)  for(int i=a;i>b;i--)
#define upd(i,a,b) for(int i=a;i<=b;i++)
#define dwd(i,a,b) for(int i=a;i>=b;i--)
//#define local
typedef long long ll;
typedef unsigned long long ull;
// Contest-template constants; esp/pi/INF/inf are not used by this solution.
const double esp = 1e-6;
const double pi = acos(-1.0);
const int INF = 0x3f3f3f3f;
const int inf = 1e9;
using namespace std;
// Fast stdin integer scanner: skips any non-digit prefix, recognizes a
// leading '-', then accumulates consecutive digits. Same contract as the
// original template function (undefined on EOF-only input, like the original).
ll read()
{
	ll sign = 1, num = 0;
	int c = getchar();
	for (; c < '0' || c > '9'; c = getchar())
		if (c == '-') sign = -1;
	for (; c >= '0' && c <= '9'; c = getchar())
		num = num * 10 + (c - '0');
	return num * sign;
}
typedef pair<int, int> pir;
// Template macros for an array-based (heap-indexed) segment tree. They are
// unused in this solution — SEG below allocates nodes dynamically — but were
// kept from the author's contest template.
#define lson l,mid,root<<1
#define rson mid+1,r,root<<1|1
#define lrt root<<1
#define rrt root<<1|1
int T, n;                 // number of test cases / points in current case
const int N = 1e5 + 10;
long long dp[N];          // declared but never used; kept for compatibility
// Dynamically allocated (open-node) segment tree over the value domain
// [0, 1e9], maintaining a point-wise maximum and answering range-max queries.
//
// BUGFIX: the node pool was N * 20. A single point insert on [0, 1e9]
// allocates up to ~31 nodes (tree depth = ceil(log2(1e9+1)) + 1), so n = 1e5
// inserts with distinct y can need ~3.1e6 nodes — beyond the old 2e6 bound,
// causing out-of-bounds writes. The pool is now N * 32.
struct SEG {
	int tot;           // number of nodes allocated so far (node 0 = "absent")
	int root;          // index of the root node; 0 until the first update
	int ls[N * 32];    // left-child index (0 = no child)
	int rs[N * 32];    // right-child index (0 = no child)
	int maxx[N * 32];  // max value stored in this node's subtree
	// Reset the tree between test cases. Clears only the nodes actually
	// allocated in the previous case instead of memset'ing all three
	// multi-megabyte arrays every time (arrays are zero-initialized at
	// program start, so this is safe on the first call too).
	void init()
	{
		for (int i = 1; i <= tot; i++) ls[i] = rs[i] = maxx[i] = 0;
		tot = 0;
		root = 0;
	}
	// Point update: set position pos to max(current, val) on segment [l, r].
	// Creates nodes lazily along the root-to-leaf path.
	void update(int &o, int l, int r, int pos, int val)
	{
		if (!o) o = ++tot;
		maxx[o] = std::max(maxx[o], val);   // subtree max is monotone in val
		if (l == r) return;
		int mid = (l + r) >> 1;
		if (pos <= mid) update(ls[o], l, mid, pos, val);
		else update(rs[o], mid + 1, r, pos, val);
	}
	// Range query: maximum over [lf, rt] within node segment [l, r].
	// Returns 0 for empty ranges (rt < lf) and for absent subtrees.
	int query(int o, int l, int r, int lf, int rt) {
		if (rt < lf) return 0;
		if (!o) return 0;
		if (lf <= l && r <= rt) {
			return maxx[o];
		}
		int mid = (l + r) >> 1;
		int ans = 0;
		if (lf <= mid) ans = std::max(ans, query(ls[o], l, mid, lf, rt));
		if (rt > mid) ans = std::max(ans, query(rs[o], mid + 1, r, lf, rt));
		return ans;
	}
}segt;
// One input point: coordinates (x, y) and its value v.
struct node {
	int x, y, v;
	// Order by x only; equal-x points form a group that main processes
	// together. FIX: take the argument by const reference instead of by
	// value — the original copied the whole struct on every comparison
	// inside sort (clang-tidy: performance-unnecessary-value-param).
	bool operator<(const node& a)const {
		return x < a.x;
	}
}a[N];
// After sorting, lf[i]/rt[i] hold the leftmost/rightmost indices of the run
// of equal-x points containing i (rt is propagated in main; lf stays i).
int lf[N], rt[N];
int main()
{
	T = read();
	while (T--)
	{
		n = read();
		segt.init();
		upd(i, 1, n) {
			a[i].x = read(); a[i].y = read(); a[i].v = read();
			lf[i] = rt[i] = i;
		}
		sort(a + 1, a + 1 + n);
		dwd(i, n-1, 1)
		{
			if (a[i].x == a[i+1].x)
			{
				rt[i] = rt[i + 1];
			}
		}
		for (int i = 1; i <= n;)
		{
			vector<int>vec;
			for (int j = lf[i]; j <= rt[i]; j++)
			{
				int qans = segt.query(segt.root, 0, 1e9, 0, a[j].y - 1);
				vec.push_back(qans);
			}
			int cnt = 0;
			for (int j = lf[i]; j <= rt[i]; j++)
			{
				segt.update(segt.root, 0, 1e9, a[j].y, vec[cnt]+a[j].v);
				cnt++;
			}
			i = rt[i] + 1;
		}
		printf("%d\n", segt.maxx[segt.root]);
	}
	return 0;
}
上一篇:linux QA


下一篇:C语言实现简单计算器小项目