#include "Robot.h"

#include <sys/socket.h>

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

#include "base64.h"

// Image -> robot affine transform, written by update_Affine():
//   [x']   [ A  B ] [x]   [C]
//   [y'] = [ D  E ] [y] + [F]
float Affine[2][3];

// Robot-space (x, y) positions of the 9 calibration points (a 3x3 grid).
// Units are whatever the robot controller uses — presumably mm; confirm.
double RobotCalibrationCoordinates[9][2] = {
    {785.712, -103.867},
    {785.626, -59.806},
    {783.977, -13.7028},
    {833.46, -103.652},
    {833.284, -59.1959},
    {833.22, -12.318},
    {879.12, -104.863},
    {878.863, -59.14540},
    {878.368, -13.4786}};

// Pixel (x, y) positions of the same 9 points as seen by the camera,
// in the same order as RobotCalibrationCoordinates.
double CameraCalibrationCoordinates[9][2] = {
    {1042, 306}, {1040, 476}, {1036, 642}, {1208, 308}, {1206, 480}, {1202, 646}, {1376, 310}, {1372, 482}, {1370, 648}};

// Fixed Z height and A/B/C orientation appended to every calibrated robot
// move (see RobotMotion); only x/y come from the affine transform.
double RobotCalibrationZ, RobotCalibrationA, RobotCalibrationB, RobotCalibrationC;

bool update_Affine()
{
    // 真实世界的坐标
    std::vector<cv::Point2f> objectPoints;
    for (int i = 0; i < 9; i++)
    {
        objectPoints.push_back(cv::Point2f(RobotCalibrationCoordinates[i][0], RobotCalibrationCoordinates[i][1]));
    }

    // 相机拍摄得到的坐标
    std::vector<cv::Point2f> imagePoints;
    for (int i = 0; i < 9; i++)
    {
        imagePoints.push_back(cv::Point2f(CameraCalibrationCoordinates[i][0], CameraCalibrationCoordinates[i][1]));
    }

    cv::Mat affine;                                                        // 仿射变换矩阵定义
    estimateAffine2D(imagePoints, objectPoints).convertTo(affine, CV_32F); // 仿射变换

    if (affine.size() != AFFINE_SIZE)
    {
        std::cerr << "affine:" << affine.size() << std::endl;
        return false;
    }

    // 结果写入仿射变化矩阵：
    Affine[0][0] = affine.at<float>(0, 0); //             [ A  B  C ]
    Affine[0][1] = affine.at<float>(0, 1); //             [ D  E  F ]
    Affine[0][2] = affine.at<float>(0, 2); // 变换公式：
    Affine[1][0] = affine.at<float>(1, 0); //          [ A  B ] [x] + C = [x']
    Affine[1][1] = affine.at<float>(1, 1); //          [ D  E ] [y] + F = [y']
    Affine[1][2] = affine.at<float>(1, 2);

    // 计算标定偏差
    std::vector<cv::Point2f> points_Calc;
    double sumX = 0, sumY = 0;
    for (int i = 0; i < imagePoints.size(); i++)
    {
        cv::Point2f pt;
        pt.x = Affine[0][0] * imagePoints[i].x + Affine[0][1] * imagePoints[i].y + Affine[0][2];
        pt.y = Affine[1][0] * imagePoints[i].x + Affine[1][1] * imagePoints[i].y + Affine[1][2];
        points_Calc.push_back(pt);
        sumX += pow(objectPoints[i].x - points_Calc[i].x, 2);
        sumY += pow(objectPoints[i].y - points_Calc[i].y, 2);
    }

    double rmsX, rmsY;
    rmsX = sqrt(sumX / imagePoints.size());
    rmsY = sqrt(sumY / imagePoints.size());

    // 输出标定偏差
    std::cout << "Calibration Error:" << std::endl;
    std::cout << "rmsX:" << rmsX << std::endl;
    std::cout << "rmsY:" << rmsY << std::endl;
    return true;
}

void getRobotCalibration()
{
    std::vector<cv::Point2f> cameraCalib = getCameraCalibrations();
    if (cameraCalib.empty())
    {
        std::cerr << " 没有相机坐标 ! " << std::endl;
        return;
    }

    updateRobotCalibrations(cameraCalib);
}

void RobotMotion(double x, double y, const int &socketfd)
{
    Json::Value root;
    Json::StreamWriterBuilder builder;
    // 设置 setSorted() 方法为 false
    builder.settings_["sorted"] = false;
    root["robot"] = 1;
    root["targetMode"] = 0;
    Json::Value queryType; // 创建一个数组类型的值
    queryType["coord"] = "MCS";
    queryType["speed"] = 30;
    root["cfg"] = queryType; // 将数组类型的值赋值给 root["queryType"]
    Json::Value targetPos;
    targetPos.append(x);
    targetPos.append(y);
    targetPos.append(RobotCalibrationZ);
    targetPos.append(RobotCalibrationA);
    targetPos.append(RobotCalibrationB);
    targetPos.append(RobotCalibrationC);
    root["targetPos"] = targetPos;

    // 禁用末尾换行符
    builder.settings_["indentation"] = ""; // 设置缩进为空字符串
    std::string JsonString = Json::writeString(builder, root);

    std::vector<uint8_t> encodedData = EncodeMessage(Robot_Motion_Command, JsonString, Controller_Byte_Length);
    send(socketfd, encodedData.data(), encodedData.size(), 0);
}
