﻿
// 2018/8/22: 首个记录，基于V4 2018/8/14
// 2018/11/12: 增加fromGeneralSample
// 2018/11/23: 修正fromGeneralSample时间戳
// 2019/9/27: 扩充目标物类别
// 2019/10/11: 修改pragma once和include
// 2019/12/25: 添加若干目标物种类
// 2025/4/8： 修正color字段解析

#ifndef OBJ_SENSOR_SAMPLE_V5_H
#define OBJ_SENSOR_SAMPLE_V5_H

#include "spadas.h"

namespace obj_sensor_sample_v5
{
	using namespace spadas;

	// How the posx/posy fields of an object are to be interpreted
	enum PositionMode
	{
		PM_ClosestPoint = 1, // Position refers to the closest point of the object
		PM_BoxCenter = 2, // Position refers to the center of the bounding box
	};

	// Object classification. Values 1-9 are major classes; values >= 11 are
	// subclasses whose tens digit appears to group them under a major class
	// (e.g. 2x under OC_Car, 3x under OC_Ped) — TODO confirm against protocol spec
	enum ObjectClass
	{
		OC_General = 1, // General object (major class)
		OC_Car = 2, // Car (major class)
		OC_Ped = 3, // Pedestrian (major class)
		OC_Truck = 4, // Truck (major class)
		OC_Bike = 5, // Bike (major class)
		OC_RailCar = 6, // Rail vehicle (major class)
		OC_Special = 7, // Special object (major class)
		OC_RoadStatic = 8, // Static object on the road (major class)
		OC_SideStatic = 9, // Static object off the road (major class)

		OC_GeneralSmall = 11, // Small general object
		OC_GeneralBig = 12, // Large general object
		OC_Barrier = 13, // General barrier / obstacle

		OC_Van = 21, // Van
		OC_Minibus = 22, // Minibus
		OC_Bus = 23, // Bus
		OC_BatteryCart = 24, // Campus battery cart
		OC_TinyCar = 25, // Micro car
		OC_SUV = 26, // SUV

		OC_Adult = 31, // Adult
		OC_Child = 32, // Child
		OC_Scooter = 33, // Self-balancing scooter
		OC_WheelChair = 34, // Wheelchair

		OC_Minitruck = 41, // Small truck
		OC_ContainerTruck = 42, // Container truck
		OC_SpecialCar = 43, // Special-purpose vehicle
		OC_Trailer = 44, // Trailer

		OC_Motorbike = 51, // Motorbike
		OC_Bicycle = 52, // Bicycle
		OC_ElectricBike = 53, // Electric bicycle
		OC_Tricycle = 54, // Tricycle

		OC_Train = 61, // Train
		OC_Tram = 62, // Tram

		OC_Animal = 71, // Animal
		OC_Ball = 72, // Ball
		OC_Litter = 73, // Litter and other debris

		OC_Cone = 81, // Traffic cone
		OC_ManholeCover = 82, // Manhole cover
		OC_Patch = 83, // Road surface patch
		OC_Gantry = 84, // Gantry

		OC_Pole = 91, // Pole
		OC_Tree = 92, // Tree
		OC_Vegetation = 93, // Shrub / vegetation
		OC_Building = 94, // Building
	};

	struct ObjectColor
	{
		Bool valid;
		Byte r; //目标物的颜色R分量
		Byte g; //目标物的颜色g分量
		Byte b; //目标物的颜色b分量

		ObjectColor() : valid(FALSE), r(0), g(0), b(0)
		{}
	};

	struct ObjectInfo
	{
		// 跟踪信息
		Int id; // 目标物ID
		Int age; // 目标物的Age
		OptionalInt publicID; // 目标物的原始ID
		OptionalInt publicAge; // 目标物的原始Age

		// 时间偏差
		OptionalInt timeOffset; // 目标物的时间偏置 us

		// 颜色信息
		ObjectColor color; // 目标物的颜色

		// 类别信息
		OptionalInt classID; // 目标物的类别ID
		ObjectClass classs; // 目标物的类型
		OptionalDouble classConfidence; // 目标物的分类置信度

		// 位置信息(pos根据PositionMode意义不同，cp专指ClosestPoint)
		PositionMode posMode; // 目标物的位置模式
		Double posx; // 目标物的x轴方向位置 m
		Double posy; // 目标物的y轴方向位置 m
		OptionalDouble posxSigma; // 目标物x轴方向位置的精度 m
		OptionalDouble posySigma; // 目标物y轴方向位置的精度 m
		Double cpx; // 目标物的最近点x轴坐标 m
		Double cpy; // 目标物的最近点y轴坐标 m
		Double cpd; // 目标物的最近点与本车轮廓距离 m

		// 长宽/朝向/轨迹
		OptionalDouble width; // 目标物的宽度 m
		OptionalDouble length; // 目标物的长度 m
		OptionalDouble heading; // 目标物的朝向 deg
		Array<Point2D> trajectory; // 目标物轨迹点的列表

		// 速度/加速度
		OptionalDouble vxRel; // x轴方向相对速度 KPH
		OptionalDouble vxAbs; // x轴方向绝对速度 KPH
		OptionalDouble vyRel; // y轴方向相对速度 KPH
		OptionalDouble vyAbs; // y轴方向绝对速度 KPH
		OptionalDouble axRel; // x轴方向相对加速度 m/s²
		OptionalDouble axAbs; // x轴方向绝对加速度 m/s²
		OptionalDouble ayRel; // y轴方向相对加速度 m/s²
		OptionalDouble ayAbs; // y轴方向绝对加速度 m/s²

		// 轮廓信息
		Array<Point2D> contour; // 目标物轮廓点的列表

		// 指标信息
		OptionalDouble ttc1; // <前向、后向> 碰撞时间 [sec]
		OptionalDouble ttc2; // <前向、后向> 碰撞时间（考虑加速度）[sec]
		OptionalDouble thw; // <前向> 车间时距 [sec]
		OptionalDouble pdca; // <前向> Potential防碰撞最小减速度 [m/s^2]
		OptionalDouble odca; // <前向> Overt防碰撞最小减速度 [m/s^2]
		OptionalDouble overlap; // <前向> 与前车重叠比例 [%]
		OptionalDouble lc; // <侧向> 横向间距 [m]
		OptionalDouble rc; // <后向> 后向间距 [m]

		ObjectInfo()
		{
			id = 0;
			age = 0;
			classs = OC_General;
			posMode = PM_ClosestPoint;
			posx = posy = 0;
			cpx = cpy = cpd = 0;
		}
	};

	struct ObjectSensorFov
	{
		Double positionX; // FOV中心点x轴坐标 m
		Double positionY; // FOV中心的y轴坐标 m
		Double angleRange; // FOV的角度范围 deg
		Double orientation; // FOV中轴线朝向角 deg
		Double distanceRange; // FOV探测距离范围 m
		Double blindRange; // FOV盲区范围 m

		ObjectSensorFov()
		{
			positionX = 0;
			positionY = 0;
			angleRange = 90;
			orientation = 0;
			distanceRange = 100;
			blindRange = 0;
		}
	};

	// One object-sensor sample: objects, key-object indices, ego vehicle info and
	// sensor FOVs, with (de)serialization to/from a flat GeneralSample value array.
	//
	// Value layout (indices): [0..10] header (counts, key indices, ego info),
	// [11 + 42*i] 42 values per object, then 6 values per FOV, then an OPTIONAL
	// tail of trajectory points followed by contour points (2 values per point).
	struct ObjectSensorSample
	{
		GlobalTimestamp timeStamp; // Timestamp of the sample

		// Objects and key-object indices
		Array<ObjectInfo> objects; // List of objects
		OptionalInt koFrontIndex; // Index of the front key object (CIPV)
		OptionalInt koLeftIndex; // Index of the left-side key object
		OptionalInt koRightIndex; // Index of the right-side key object

		// Ego vehicle reference info
		OptionalDouble vehiVX;// Ego vehicle speed [KPH]
		OptionalDouble vehiCurv; // Ego vehicle path curvature [1/m]
		OptionalDouble vehiWidth; // Ego vehicle width [m]
		OptionalDouble vehiLength; // Ego vehicle length [m]

		// FOV info
		Array<ObjectSensorFov> fovs; // List of sensor FOVs

		// Returns the protocol name identifying this sample type on the given channel
		static String getProtocolName(UInt channel)
		{
			return (String)"obj-sensor-sample-v5@" + channel;
		}

		// Returns the column title matching the value layout of toGeneralSample
		static String getTitle()
		{
			return "Object count,Trajectory point count,Contour point count,Key object index (CIPV),Key object index (left-side),Key object index (right-side),Vehicle speed[KPH],Vehicle curvature[1/m],Vehicle width[m],Vehicle length[m],FOV count,First object's ID,Object's age,Object's raw ID,Object's raw age,Object's class ID,Object's class,Object's position mode,Object's position-x[m],Object's position-y[m], Object's closest point-x[m], Object's closest point-y[m],Object's closest point distance[m],Object's width[m],Object's length[m],Object's heading[°],Object's relative velocity-x[KPH],Object's absolute velocity-x[KPH],Object's relative velocity-y[KPH],Object's absolute velocity-y[KPH],Object's relative acceleration-x[m/s2],Object's absolute acceleration-x[m/s2],Object's relative acceleration-y[m/s2],Object's absolute acceleration-y[m/s2],Object's TTC-1st[s],Object's TTC-2nd[s],Object's time headway[s],Object's potential DCA[m/s2],Object's overt DCA[m/s2],Object's overlap[%],Object's lateral clearance[m],Object's rear clearance[m],Trajectory offset,Trajectory count,Contour offset,Contour count,Object's time offset[us],Object's color(R-component),Object's color(G-component),Object's color(B-component),Object class's confidence,Object's position-x sigma,Object's position-y sigma,Second object's ID,etc.";
		}

		// Fills this sample from a GeneralSample produced by toGeneralSample.
		// Returns FALSE when the header is invalid or the value count does not
		// match the declared layout; in that case this object may already have
		// been partially modified.
		Bool fromGeneralSample(GeneralSample sample)
		{
			Array<OptionalDouble> v = sample.values;
			if (v.size() < 11) return FALSE;

			// The four count fields of the header are mandatory
			if (!v[0].valid ||
				!v[1].valid ||
				!v[2].valid ||
				!v[10].valid) return FALSE;

			Int nObjects = (Int)v[0].value;
			Int nTrajectories = (Int)v[1].value;
			Int nContours = (Int)v[2].value;
			Int nFovs = (Int)v[10].value;

			// Guard against corrupted headers carrying negative counts, which would
			// otherwise poison the size arithmetic and array allocations below
			if (nObjects < 0 || nTrajectories < 0 || nContours < 0 || nFovs < 0) return FALSE;

			// The trailing trajectory/contour point block is optional in the stream
			Int sizeWithExtra = 11 + nObjects * 42 + nFovs * 6 + (nTrajectories + nContours) * 2;
			Int sizeWithoutExtra = 11 + nObjects * 42 + nFovs * 6;
			if (v.size() != sizeWithExtra && v.size() != sizeWithoutExtra) return FALSE;

			timeStamp = sample.timeStamp;

			koFrontIndex = v[3].valid ? (Int)v[3].value : OptionalInt();
			koLeftIndex = v[4].valid ? (Int)v[4].value : OptionalInt();
			koRightIndex = v[5].valid ? (Int)v[5].value : OptionalInt();

			vehiVX = v[6];
			vehiCurv = v[7];
			vehiWidth = v[8];
			vehiLength = v[9];

			// Base offsets of the four segments inside the value array
			Int objBase = 11;
			Int fovBase = objBase + 42 * nObjects;
			Int trajBase = fovBase + 6 * nFovs;
			Int contBase = trajBase + 2 * nTrajectories;

			objects = Array<ObjectInfo>(nObjects);
			for (Int i = 0; i < nObjects; i++)
			{
				ObjectInfo& obj = objects[i];
				UInt baseIndex = objBase + 42 * i;

				// NOTE(review): mandatory per-object fields are read without checking
				// .valid, mirroring the writer which always fills them — confirm no
				// other producer emits invalid values for these slots
				obj.id = (Int)v[baseIndex + 0].value;
				obj.age = (Int)v[baseIndex + 1].value;
				obj.publicID = v[baseIndex + 2].valid ? (Int)v[baseIndex + 2].value : OptionalInt();
				obj.publicAge = v[baseIndex + 3].valid ? (Int)v[baseIndex + 3].value : OptionalInt();
				obj.classID = v[baseIndex + 4].valid ? (Int)v[baseIndex + 4].value : OptionalInt();
				obj.classs = (ObjectClass)(Int)v[baseIndex + 5].value;
				obj.posMode = (PositionMode)(Int)v[baseIndex + 6].value;
				obj.posx = v[baseIndex + 7].value;
				obj.posy = v[baseIndex + 8].value;
				obj.cpx = v[baseIndex + 9].value;
				obj.cpy = v[baseIndex + 10].value;
				obj.cpd = v[baseIndex + 11].value;
				obj.width = v[baseIndex + 12];
				obj.length = v[baseIndex + 13];
				obj.heading = v[baseIndex + 14];
				obj.vxRel = v[baseIndex + 15];
				obj.vxAbs = v[baseIndex + 16];
				obj.vyRel = v[baseIndex + 17];
				obj.vyAbs = v[baseIndex + 18];
				obj.axRel = v[baseIndex + 19];
				obj.axAbs = v[baseIndex + 20];
				obj.ayRel = v[baseIndex + 21];
				obj.ayAbs = v[baseIndex + 22];
				obj.ttc1 = v[baseIndex + 23];
				obj.ttc2 = v[baseIndex + 24];
				obj.thw = v[baseIndex + 25];
				obj.pdca = v[baseIndex + 26];
				obj.odca = v[baseIndex + 27];
				obj.overlap = v[baseIndex + 28];
				obj.lc = v[baseIndex + 29];
				obj.rc = v[baseIndex + 30];
				obj.timeOffset = v[baseIndex + 35].valid ? (Int)v[baseIndex + 35].value : OptionalInt();
				// Color is valid only when all three components are present
				obj.color.r = v[baseIndex + 36].valid ? (Byte)v[baseIndex + 36].value : 0;
				obj.color.g = v[baseIndex + 37].valid ? (Byte)v[baseIndex + 37].value : 0;
				obj.color.b = v[baseIndex + 38].valid ? (Byte)v[baseIndex + 38].value : 0;
				obj.color.valid = v[baseIndex + 36].valid && v[baseIndex + 37].valid && v[baseIndex + 38].valid;
				obj.classConfidence = v[baseIndex + 39];
				obj.posxSigma = v[baseIndex + 40];
				obj.posySigma = v[baseIndex + 41];

				// Slots +31..+34 locate this object's points inside the optional tail
				Int trajOffset = (Int)v[baseIndex + 31].value;
				Int trajSize = (Int)v[baseIndex + 32].value;
				Int contOffset = (Int)v[baseIndex + 33].value;
				Int contSize = (Int)v[baseIndex + 34].value;

				if (v.size() == sizeWithExtra)
				{
					obj.trajectory = Array<Point2D>(trajSize);
					for (Int n = 0; n < trajSize; n++)
					{
						obj.trajectory[n].x = (Float)v[trajBase + 2 * trajOffset + 2 * n].value;
						obj.trajectory[n].y = (Float)v[trajBase + 2 * trajOffset + 2 * n + 1].value;
					}

					obj.contour = Array<Point2D>(contSize);
					for (Int n = 0; n < contSize; n++)
					{
						obj.contour[n].x = (Float)v[contBase + 2 * contOffset + 2 * n].value;
						obj.contour[n].y = (Float)v[contBase + 2 * contOffset + 2 * n + 1].value;
					}
				}
				else
				{
					// Tail absent: leave trajectory and contour empty
					obj.trajectory = Array<Point2D>();
					obj.contour = Array<Point2D>();
				}
			}

			fovs = Array<ObjectSensorFov>(nFovs);
			for (Int i = 0; i < nFovs; i++)
			{
				ObjectSensorFov& fov = fovs[i];
				Int baseIndex = fovBase + 6 * i;

				// Stream order is orientation before angleRange, which differs from
				// the declaration order in ObjectSensorFov
				fov.positionX = v[baseIndex].value;
				fov.positionY = v[baseIndex + 1].value;
				fov.orientation = v[baseIndex + 2].value;
				fov.angleRange = v[baseIndex + 3].value;
				fov.distanceRange = v[baseIndex + 4].value;
				fov.blindRange = v[baseIndex + 5].value;
			}

			return TRUE;
		}

		// Serializes this sample into a flat GeneralSample value array; the layout
		// matches fromGeneralSample and the column title of getTitle
		GeneralSample toGeneralSample()
		{
			GeneralSample sample;
			sample.timeStamp = timeStamp;

			// Pre-compute each object's offset/size inside the shared point tail
			UInt trajectoryPoints = 0;
			UInt contourPoints = 0;
			Array<UInt> trajectoryOffset(objects.size());
			Array<UInt> trajectorySize(objects.size());
			Array<UInt> contourOffset(objects.size());
			Array<UInt> contourSize(objects.size());
			for (UInt i = 0; i < objects.size(); i++)
			{
				trajectoryOffset[i] = trajectoryPoints;
				trajectorySize[i] = objects[i].trajectory.size();
				trajectoryPoints += trajectorySize[i];
				contourOffset[i] = contourPoints;
				contourSize[i] = objects[i].contour.size();
				contourPoints += contourSize[i];
			}

			sample.values = Array<OptionalDouble>(11 + objects.size() * 42 + fovs.size() * 6 + (trajectoryPoints + contourPoints) * 2);
			OptionalDouble *data = sample.values.data();
			
			UInt objBase = 11;
			UInt fovBase = objBase + objects.size() * 42;
			UInt trajectoryBase = fovBase + fovs.size() * 6;
			UInt contourBase = trajectoryBase + trajectoryPoints * 2;

			// Header: counts
			data[0] = (Double)objects.size();
			data[1] = (Double)trajectoryPoints;
			data[2] = (Double)contourPoints;
			data[10] = (Double)fovs.size();

			// Header: key-object indices
			data[3] = koFrontIndex.valid ? (Double)koFrontIndex.value : OptionalDouble();
			data[4] = koLeftIndex.valid ? (Double)koLeftIndex.value : OptionalDouble();
			data[5] = koRightIndex.valid ? (Double)koRightIndex.value : OptionalDouble();

			// Header: ego vehicle info
			data[6] = vehiVX;
			data[7] = vehiCurv;
			data[8] = vehiWidth;
			data[9] = vehiLength;

			for (UInt i = 0; i < objects.size(); i++)
			{
				ObjectInfo& src = objects[i];
				UInt baseIndex = objBase + 42 * i;
				data[baseIndex + 0] = (Double)src.id;
				data[baseIndex + 1] = (Double)src.age;
				data[baseIndex + 2] = src.publicID.valid ? (Double)src.publicID.value : OptionalDouble();
				data[baseIndex + 3] = src.publicAge.valid ? (Double)src.publicAge.value : OptionalDouble();
				data[baseIndex + 4] = src.classID.valid ? (Double)src.classID.value : OptionalDouble();
				data[baseIndex + 5] = (UInt)src.classs;
				data[baseIndex + 6] = (UInt)src.posMode;
				data[baseIndex + 7] = src.posx;
				data[baseIndex + 8] = src.posy;
				data[baseIndex + 9] = src.cpx;
				data[baseIndex + 10] = src.cpy;
				data[baseIndex + 11] = src.cpd;
				data[baseIndex + 12] = src.width;
				data[baseIndex + 13] = src.length;
				data[baseIndex + 14] = src.heading;
				data[baseIndex + 15] = src.vxRel;
				data[baseIndex + 16] = src.vxAbs;
				data[baseIndex + 17] = src.vyRel;
				data[baseIndex + 18] = src.vyAbs;
				data[baseIndex + 19] = src.axRel;
				data[baseIndex + 20] = src.axAbs;
				data[baseIndex + 21] = src.ayRel;
				data[baseIndex + 22] = src.ayAbs;
				data[baseIndex + 23] = src.ttc1;
				data[baseIndex + 24] = src.ttc2;
				data[baseIndex + 25] = src.thw;
				data[baseIndex + 26] = src.pdca;
				data[baseIndex + 27] = src.odca;
				data[baseIndex + 28] = src.overlap;
				data[baseIndex + 29] = src.lc;
				data[baseIndex + 30] = src.rc;
				data[baseIndex + 31] = (Double)trajectoryOffset[i];
				data[baseIndex + 32] = (Double)trajectorySize[i];
				data[baseIndex + 33] = (Double)contourOffset[i];
				data[baseIndex + 34] = (Double)contourSize[i];
				data[baseIndex + 35] = src.timeOffset.valid ? (Double)src.timeOffset.value : OptionalDouble();
				// Color components are written only when the color as a whole is valid
				data[baseIndex + 36] = src.color.valid ? (Double)src.color.r : OptionalDouble();
				data[baseIndex + 37] = src.color.valid ? (Double)src.color.g : OptionalDouble();
				data[baseIndex + 38] = src.color.valid ? (Double)src.color.b : OptionalDouble();
				data[baseIndex + 39] = src.classConfidence;
				data[baseIndex + 40] = src.posxSigma;
				data[baseIndex + 41] = src.posySigma;

				UInt dataOffset = trajectoryBase + trajectoryOffset[i] * 2;
				for (UInt n = 0; n < trajectorySize[i]; n++)
				{
					Point2D& pt = src.trajectory[n];
					data[dataOffset + 2 * n] = pt.x;
					data[dataOffset + 2 * n + 1] = pt.y;
				}

				dataOffset = contourBase + contourOffset[i] * 2;
				for (UInt n = 0; n < contourSize[i]; n++)
				{
					Point2D& pt = src.contour[n];
					data[dataOffset + 2 * n] = pt.x;
					data[dataOffset + 2 * n + 1] = pt.y;
				}
			}

			for (UInt i = 0; i < fovs.size(); i++)
			{
				ObjectSensorFov& src = fovs[i];
				UInt baseIndex = fovBase + 6 * i;
				data[baseIndex + 0] = src.positionX;
				data[baseIndex + 1] = src.positionY;
				// Orientation precedes angleRange in the stream (see fromGeneralSample)
				data[baseIndex + 2] = src.orientation;
				data[baseIndex + 3] = src.angleRange;
				data[baseIndex + 4] = src.distanceRange;
				data[baseIndex + 5] = src.blindRange;
			}

			// Only the header + object + FOV columns count as significant; the
			// trajectory/contour tail is excluded
			sample.significantCount = trajectoryBase;
			return sample;
		}
	};
}

#endif