//
// Created by nvidia on 2021/4/28.
//
#include "additions.h"
#include "Setter.h"

#include <cerrno>
#include <cstdio>
#include <cstring>
#include <ctime>

extern rm::Setter rm_setter;

/*!
 * Create a time-stamped directory whose name is filename_prefix followed by
 * the current local time ("%Y-%m-%d_%H_%M_%S").
 * @param filename_prefix path prefix the timestamp is appended to
 * @param p_cMkdir        out: heap-allocated (new[]) full path; caller owns it
 * @return mkdir()'s return value: 0 on success, -1 on failure (errno set)
 */
int makeDir(const std::string &filename_prefix, char *&p_cMkdir)
{
    constexpr size_t kPathLen = 160;
    p_cMkdir = new char[kPathLen];

    time_t time_value;
    time(&time_value);
    char stamp[144];
    strftime(stamp, sizeof(stamp), "%Y-%m-%d_%H_%M_%S", localtime(&time_value));

    // snprintf instead of strcpy/strcat: never overruns the 160-byte buffer.
    snprintf(p_cMkdir, kPathLen, "%s%s", filename_prefix.c_str(), stamp);
    // Never pass external data as the format string (format-string bug).
    printf("%s", p_cMkdir);

    int isCreate = mkdir(p_cMkdir, S_IRUSR | S_IWUSR | S_IXUSR | S_IRWXG | S_IRWXO);

    if (!isCreate)
        printf("create path:%s\n", p_cMkdir);
    else
        // Report the actual failure reason, not the path masquerading as an error code.
        printf("create path failed! error: %s (path: %s)\n", strerror(errno), p_cMkdir);
    return isCreate;
}

/*!
 * Create a VideoWriter targeting a time-stamped .avi under SAVE_VIDEO_PATH.
 * When SAVE_VIDEO_PATH is not defined the writer is returned unopened.
 * @param filename_prefix currently unused; the path comes from SAVE_VIDEO_PATH
 * @return the (possibly unopened) cv::VideoWriter
 */
cv::VideoWriter initVideoWriter(const std::string &filename_prefix) {
    cv::VideoWriter writer;

    time_t now;
    time(&now);
    char path[144];

#ifdef SAVE_VIDEO_PATH
    // Build "<SAVE_VIDEO_PATH>YYYY-mm-dd_HH_MM_SS.avi" from the local time.
    strftime(path, sizeof(path), SAVE_VIDEO_PATH "%Y-%m-%d_%H_%M_%S.avi", localtime(&now));
    writer.open(path, CV_FOURCC('M', 'J', 'P', 'G'), 50,
                cv::Size(PICTURE_COL_NUM, PICTURE_ROW_NUM), true);
#endif

    return writer;
}


//extern cv::VideoWriter video_writer;
//void saveVideos(const cv::Mat &src) {
//	if (!src.empty()) {
//#ifdef SAVE_VIDEO
//		video_writer<<src;
////		cv::waitKey(1);
//#endif
//	}
//}

/// Display the unprocessed frame in the "origin" window; no-op on empty input.
void showOrigin(const cv::Mat &src) {
    if (src.empty()) return;
    imshow("origin", src);
}

extern int last_fps;
/*!
 * Count frames, refresh the global `last_fps` once per second, and draw the
 * current FPS value onto the frame.
 * @param src frame to annotate (modified in place)
 */
void showFps(cv::Mat &src) {
    static systime first_fps_time = 0;  // start of the current 1-second window
    static int fps = 0;                 // frames counted in the current window
    static systime now;
    char show_fps[100];

    getsystime(now);
    fps += 1;
    // A full second has elapsed: publish the count and restart the window.
    if (getTimeIntervalms(now, first_fps_time) >= 1000) {
        last_fps = fps;
        getsystime(first_fps_time);
        fps = 0;
    }

    // snprintf bounds the write; "%d" can never overflow 100 bytes anyway.
    snprintf(show_fps, sizeof(show_fps), "fps:%d", last_fps);
    cv::putText(src, show_fps, cv::Point(10, 30), cv::FONT_HERSHEY_TRIPLEX, 1, cv::Scalar(0, 0, 255));
}

/*!
 * Preprocess a video frame: scale so one dimension matches the target, then
 * centre-crop to PICTURE_COL_NUM x PICTURE_ROW_NUM.
 * @param src frame, resized and cropped in place
 * @return false when the frame is empty, true otherwise
 */
bool videoExtract(cv::Mat &src) {
    if (src.empty()) return false;
    float in_w = static_cast<float>(src.cols);
    float in_h = static_cast<float>(src.rows);
    float out_w = PICTURE_COL_NUM;
    float out_h = PICTURE_ROW_NUM;
    if (in_w / in_h > out_w / out_h) {
        // Source is wider than the target ratio: fit height, crop width.
        in_w *= out_h / in_h;
        resize(src, src, cv::Size(in_w, int(out_h)));
        src = src(cv::Rect((in_w - int(out_w)) / 2, 0, int(out_w), int(out_h)));
    } else {
        // Source is taller than the target ratio: fit width, crop height.
        in_h *= out_w / in_w;
        resize(src, src, cv::Size(int(out_w), in_h));
        src = src(cv::Rect(0, (in_h - int(out_h)) / 2, int(out_w), int(out_h)));
    }
    return true;
}

/*!
 * Preprocess a camera frame: centre-crop it to
 * PICTURE_COL_NUM x PICTURE_ROW_NUM without resizing.
 * @param src frame, cropped in place
 * @return false when the frame is empty or smaller than the target size
 *         (the crop rectangle would be out of bounds), true otherwise
 */
bool cameraExtract(cv::Mat &src) {
    if (src.empty()) return false;
    const int crop_w = int(PICTURE_COL_NUM);
    const int crop_h = int(PICTURE_ROW_NUM);
    // Guard: a frame smaller than the crop would yield a negative-offset
    // cv::Rect and make the ROI operator throw.
    if (src.cols < crop_w || src.rows < crop_h) return false;
    src = src(cv::Rect((src.cols - crop_w) / 2, (src.rows - crop_h) / 2, crop_w, crop_h));
    return true;
}

/// Euclidean length of a 2-D vector (distance of p from the origin).
double getPointLength(const cv::Point2f &p) {
    const float sq = p.x * p.x + p.y * p.y;
    return sqrt(sq);
}

/// Euclidean length of a 3-D vector (distance of p from the origin).
double getPointLength(const cv::Point3f &p) {
    const float sq = p.x * p.x + p.y * p.y + p.z * p.z;
    return sqrt(sq);
}

/// Euclidean distance between two 2-D points.
double getPointLength(const cv::Point2f &p1, const cv::Point2f &p2) {
    const float dx = p1.x - p2.x;
    const float dy = p1.y - p2.y;
    return sqrt(dx * dx + dy * dy);
}

/// Euclidean distance between two 3-D points.
double getPointLength(const cv::Point3f &p1, const cv::Point3f &p2) {
    const float dx = p1.x - p2.x;
    const float dy = p1.y - p2.y;
    const float dz = p1.z - p2.z;
    return sqrt(dx * dx + dy * dy + dz * dz);
}

/*!
 * Convert a (pitch, yaw) angle pair in degrees to the corresponding
 * pixel position via the pinhole model (tan * focal length in pixels).
 * @param pyangle (pitch, yaw) in degrees
 * @return pixel coordinate cv::Point2f(x, y)
 */
cv::Point2f angleToPixel(cv::Point2f pyangle){
    // Pitch maps to the vertical axis, yaw to the horizontal axis.
    const float px_y = tan(pyangle.x / ANGLE_PER_RAD) * FOCUS * FOCUS_PIXAL_8MM + IMAGE_CENTER_Y;
    const float px_x = tan(pyangle.y / ANGLE_PER_RAD) * FOCUS * FOCUS_PIXAL_8MM + IMAGE_CENTER_X;
    return cv::Point2f(px_x, px_y);
}

/*!
 * Convert a pixel coordinate to the corresponding (pitch, yaw) angles
 * in degrees via the inverse pinhole model.
 * @param point pixel coordinate
 * @return cv::Point2f(pitch, yaw)
 */
cv::Point2f pixelToAngle(cv::Point2f point){
    // Vertical offset from the image centre gives pitch, horizontal gives yaw.
    const float pitch = atan((point.y - IMAGE_CENTER_Y) / (FOCUS_PIXAL_8MM * FOCUS)) * ANGLE_PER_RAD;
    const float yaw   = atan((point.x - IMAGE_CENTER_X) / (FOCUS_PIXAL_8MM * FOCUS)) * ANGLE_PER_RAD;
    return cv::Point2f(pitch, yaw);
}

/*!
 * Build the rotation matrix taking camera-frame coordinates to the world
 * frame from the current gimbal angles, and its inverse.
 * @param R_vec   out: combined rotation Rz * Ry * Rx
 * @param R_back  out: inverse rotation (transpose of R_vec)
 * @param pyangle (pitch, yaw) in degrees; up / right are positive
 */
void calcRotMatrix(Eigen::Matrix<float,3,3> &R_vec, Eigen::Matrix<float,3,3> &R_back, const cv::Point2f pyangle){
    const float alpha = -pyangle.x;  // pitch: up is positive
    const float beta  = -pyangle.y;  // yaw: right is positive
    const float gamma = 0;           // roll is not measured
    const float sa = sin(alpha / ANGLE_PER_RAD), ca = cos(alpha / ANGLE_PER_RAD);
    const float sb = sin(beta  / ANGLE_PER_RAD), cb = cos(beta  / ANGLE_PER_RAD);
    const float sg = sin(gamma / ANGLE_PER_RAD), cg = cos(gamma / ANGLE_PER_RAD);

    // Elementary rotations about each axis.
    Eigen::Matrix<float,3,3> Rx, Ry, Rz;
    Rz << cg, -sg, 0,
          sg,  cg, 0,
           0,   0, 1;
    Rx <<  1,   0,   0,
           0,  ca, -sa,
           0,  sa,  ca;
    Ry << cb,   0,  sb,
           0,   1,   0,
         -sb,   0,  cb;
    R_vec = Rz * Ry * Rx;
    // A rotation matrix is orthogonal, so its transpose is its inverse.
    R_back = R_vec.transpose();
}

/*!
 * Resolve a camera-relative coordinate into the world frame, compensating
 * for the camera's offset from the rotation axis (camera2ptz) and the
 * chassis position.
 * @param refer_coor       coordinate in the camera frame
 * @param R_vec            camera-to-world rotation matrix
 * @param chassis_location extra x offset of the chassis
 * @return world coordinate cv::Point3f(x, y, z)
 */
cv::Point3f worldCoordinateResolver(cv::Point3f refer_coor, Eigen::Matrix<float,3,3> R_vec, float chassis_location){
    // Camera-to-pan-tilt translation, read from the global setter; the
    // chassis position is folded into the x component.
    const float tx = rm_setter.camera2ptz.at<TYPE_POS>(0,0) + chassis_location;
    const float ty = rm_setter.camera2ptz.at<TYPE_POS>(0,1);
    const float tz = rm_setter.camera2ptz.at<TYPE_POS>(0,2);
    Eigen::Vector3f offset;
    offset << tx, ty, tz;

    Eigen::Vector3f relative;
    relative << refer_coor.x, refer_coor.y, refer_coor.z;

    const Eigen::Vector3f world = R_vec * relative - offset;
    return cv::Point3f(world[0], world[1], world[2]);
}

/*!
 * Inverse of worldCoordinateResolver: transform a world coordinate back
 * into the camera-relative frame.
 * @param world_coor       coordinate in the world frame
 * @param R_back           world-to-camera rotation matrix
 * @param chassis_location extra x offset of the chassis
 * @return camera-relative coordinate cv::Point3f(x, y, z)
 */
cv::Point3f antiWorldCoordinateResolver(cv::Point3f world_coor, Eigen::Matrix<float,3,3> R_back, float chassis_location){
    Eigen::Vector3f world;
    world << world_coor.x, world_coor.y, world_coor.z;

    // Same camera-to-pan-tilt translation used in the forward transform.
    const float tx = rm_setter.camera2ptz.at<TYPE_POS>(0,0) + chassis_location;
    const float ty = rm_setter.camera2ptz.at<TYPE_POS>(0,1);
    const float tz = rm_setter.camera2ptz.at<TYPE_POS>(0,2);
    Eigen::Vector3f offset;
    offset << tx, ty, tz;

    const Eigen::Vector3f relative = R_back * (world + offset);
    return cv::Point3f(relative[0], relative[1], relative[2]);
}

/*!
 * Convert a camera-relative coordinate to (pitch, yaw) angles in degrees.
 * @param refer_coor coordinate in the camera frame; z is assumed non-zero
 *                   (atan of y/z and x/z — TODO confirm z > 0 upstream)
 * @return cv::Point2f(pitch, yaw)
 */
cv::Point2f referCoordinate2Angle(const cv::Point3f &refer_coor){
    const float pitch = atan(refer_coor.y / refer_coor.z) * ANGLE_PER_RAD;
    const float yaw   = atan(refer_coor.x / refer_coor.z) * ANGLE_PER_RAD;
    return cv::Point2f(pitch, yaw);
}

/*!
 * Convert a time interval into an equivalent number of frames at the
 * most recently measured frame rate (global `last_fps`).
 * @param internal interval in milliseconds
 * @return interval expressed in frames
 */
float time2frame(systime internal){
    const float ms_per_frame = 1000.0f / float(last_fps);
    return internal / ms_per_frame;
}

/*********************************************************
Write a single bit inside one byte.
buff:  byte to edit (only buff[0] is touched)
pos:   bit position, 0..7; pos=0 addresses the least-significant bit,
       pos=7 the most-significant bit
value: bit value to write; only the lowest bit is used
*********************************************************/
void bitsWrite(uint8_t* buff, int pos, uint8_t value)
{
    // Convert the caller's LSB-first position into an MSB-first index.
    pos = 7 - pos;
    uint8_t index[] =
            {
                    0x80,       0x40,       0x20,       0x10,       0x8,       0x4,       0x2,       0x1,
            };

    int byteLeft = (pos) % 8;
    // Clear the target bit, keeping every other bit as-is.
    buff[0] = buff[0] & ~(index[byteLeft]);
    // Mask to a single bit: a value > 1 must not corrupt neighbouring bits.
    uint32_t curBit = value & 1u;
    uint8_t bitValue;
    int moveBits = pos - 7;
    // Shift the bit into the target position (left for pos 0..6, none for 7).
    if (moveBits >= 0)
        bitValue = curBit >> moveBits;
    else
        bitValue = curBit << -moveBits;
    buff[0] = buff[0] | bitValue;
}