#include <ifaddrs.h>
#include <net/if.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <unistd.h>

#include <cstdio>
#include <cstring>

#include "ccalibration.hpp"
#include "cdetection.hpp"
#include "cipm.hpp"
#include "cservo.hpp"
#include "cuarthost.hpp"
#include "cudpsendpoints.hpp"
#include "cudpsendthread.hpp"
#include "cudpserverthread.hpp"
#include "cvisible_corrector.hpp"
#include "uart.h"

using namespace cv;
using namespace std;

// ---- Runtime switches ----
bool SHOW = false;          // NOTE(review): not referenced in this file — presumably a display toggle; verify
bool WRITE = false;         // NOTE(review): not referenced in this file — presumably a file-dump toggle; verify
bool DEBUG = false;         // when true, mode-3 calibration dumps projected_points.jpg for inspection
bool PROCESS_GRAY = false;  // convert captured BGR frames to grayscale before processing
bool UDP_GRAY = true;       // NOTE(review): not referenced in this file — confirm against the UDP senders

// Run mode of the main loop. Value 2 is deliberately skipped (eRecognition = 3).
enum RunMode { eCalibration = 0, ePerspective = 1, eRecognition = 3, eDetection = 4, eModeEnd };
// Frame source: a live camera or a playlist of video files.
enum Source { eCamera, eVideo, eSourceEnd };

int gCameraID = 0;           // OpenCV device index used when gSource == eCamera
int gSource = eCamera;       // active frame source (see enum Source)
bool gUdpSend = false;       // NOTE(review): not referenced in this file
int gRunMode = eModeEnd;     // current main-loop mode; overridden by UdpServerThread.mMode each frame
int gControlMode = -1;       // last control command; consumed by SwitchMode()
bool gFilterOpened = false;  // true while the servo-controlled filter is open (Servo.Open/Close)
int gWidth = 1920, gHeight = 1080, gPartNum = 0, gUdpImgFlag = 0;  // capture size; gPartNum/gUdpImgFlag unused here
int gTargetSideLength = 250;  // target side length, forwarded to UdpSendPoints.UpdateParams
int gTargetHeight = 400;      // target height, forwarded to UdpSendPoints.UpdateParams
string gParamsFile = "params.xml";                         // runtime settings (size, UDP endpoints, rate)
string gCalibrationParamsFile = "calibration_params.xml";  // persisted calibration points / homographies
string UdpImageIP = "";   // destination IP for the preview image stream
string UdpPointsIP = "";  // primary destination IP for detected points
int UdpImagePort = 0;     // UDP port for the image stream
int UdpPointsPort = 0;    // UDP port for the points stream
float ImageHz = 0.0;      // preview send rate in frames/second (used to derive a frame interval in main)

VideoCapture gInputCapture;  // shared capture handle (camera or current video file)
vector<string> gCameraStrs;  // playlist of video files for gSource == eVideo
bool gImage_OK = true;       // cleared by GetImage() when no more frames are available

// Long-lived pipeline / I/O singletons.
CDetection Detection;
CPerspective Perspective;
CCalibration Calibration;
CUdpSendThread UdpSendThread;      // pushes preview images over UDP (mat_que + semaphore)
CUdpServerThread UdpServerThread;  // receives mode commands and infrared points over UDP
// CTcpSendThread TcpSendThread;
CUdpSendPoints UdpSendPoints;      // sends detected points and homographies
CUartHost UartHost;                // UART link (calibration status reporting)
CServo Servo;                      // opens/closes the filter tracked by gFilterOpened
CVisibleCorrector Corrector;       // corner / target detection

void SetImageSource(void);                       // (re)open the configured frame source
Mat GetImage(void);                              // fetch the next frame (empty Mat on failure)
int GetRunMode(int control_mode);                // map a control command to a RunMode
int SwitchMode(int cur_mode, int control_mode);  // pick the next mode + per-mode teardown
bool CheckAddress(void);                         // MAC-based hardware lock

// Load runtime settings (capture size, UDP endpoints, preview rate) from
// gParamsFile. On failure the compiled-in defaults are kept unchanged.
void ReadParams() {
  FileStorage fs(gParamsFile, FileStorage::READ);  // Read the settings
  if (!fs.isOpened()) {
    cout << "[ERROR]Could not open the params file: \"" << gParamsFile << "\"" << endl;
  } else {
    // Fixed log-message typo: "Successed" -> "Succeeded".
    cout << "[INFO]Succeeded to open the params file: \"" << gParamsFile << "\"" << endl;
    fs["ImageWidth"] >> gWidth;
    fs["ImageHeight"] >> gHeight;
    fs["UdpImageIP"] >> UdpImageIP;
    fs["UdpPointsIP"] >> UdpPointsIP;
    fs["UdpImagePort"] >> UdpImagePort;
    fs["UdpPointsPort"] >> UdpPointsPort;
    fs["ImageHz"] >> ImageHz;
    // Echo the effective configuration for the operator.
    cout << "ImageWidth: " << gWidth << " ImageHeight: " << gHeight << " UdpImageIP: " << UdpImageIP
         << " UdpImagePort: " << UdpImagePort << " UdpPointsIP: " << UdpPointsIP << " UdpPointsPort: " << UdpPointsPort
         << " ImageHz: " << ImageHz << endl;
  }
  fs.release();  // close Settings file
}

void WriteParams() {
  FileStorage fs(gParamsFile, FileStorage::WRITE);
  fs << "ImageWidth" << gWidth;
  fs << "ImageHeight" << gHeight;
  fs << "TargetSideLength" << gTargetSideLength;
  fs << "TargetHeight" << gTargetHeight;
  fs.release();  // close Settings file
}

int main(int argc, char *argv[]) {
  if (!CheckAddress()) {
    std::cout << "Invilid exe!" << std::endl;
    return -1;
  }
  ReadParams();
  int frame_interval = int(30 / ImageHz);
  // set udp
  // send image with udp
  UdpSendThread.udp.SetAddr(UdpImageIP.data(), UdpImagePort);
  UdpSendThread.Start();

  // send points with udp
  const char *IPGroup[] = {UdpPointsIP.data(), "192.168.1.12", "192.168.1.13", "192.168.1.14", "192.168.1.15"};
  UdpSendPoints.SetAddr(IPGroup, UdpPointsPort);
  UdpSendPoints.UpdateParams(gWidth, gHeight, gTargetHeight, gTargetSideLength);

  // receive command with udp
  UdpServerThread.Start();

  // TCP
  // TcpSendThread.addr = IPGroup;
  // TcpSendThread.Start();

  // set uart
  UartHost.Open();
  Servo.Close();
  gFilterOpened = false;

  Mat template_img = imread("standard.jpg");
  if (template_img.empty()) {
    cout << "Failed to load template image" << endl;
  }
  Mat gray_template;
  cvtColor(template_img, gray_template, COLOR_BGR2GRAY);

  Mat img;
  Mat ori_img;
  Mat sub_img;
  cv::Rect rect;
  std::string img_Path;
  Mat H1;
  Mat H3;
  Mat H2_inv;
  vector<Point2f> pts;
  int pt_count = 0;
  long int frame_count = 0;
  CTimer whole_time("WHOLE");
  CTimer main_time("Main");
  CTimer corner_detection_time("Corner Detection");
  CTimer template_matching_time("Template Matching");
  CTimer convert_time("CONVERT");
  while (true) {
    SetImageSource();
    while (gImage_OK) {
      img = GetImage();
      if (!img.empty()) {
        // receive uart data
        int tmp_width = gWidth;
        int tmp_height = gHeight;
        int tmp_target_height = gTargetHeight;
        int tmp_target_side_length = gTargetSideLength;

        frame_count++;
        if (PROCESS_GRAY && img.channels() == 3) {
          cvtColor(img, img, COLOR_BGR2GRAY);
        }

        vector<Point2f> board_pts;
        vector<Point2f> points;
        vector<Point2f> initial_visible_points, initial_infrared_points, standard_points;
        FileStorage fs_read, fs_write;
        if (UdpServerThread.mMode == 1 || UdpServerThread.mMode == 3) gRunMode = eCalibration;
        if (UdpServerThread.mMode == 2) gRunMode = eRecognition;
        if (UdpServerThread.mMode == 0) gRunMode = eModeEnd;
        switch (gRunMode) {
          case eCalibration:
            if (!gFilterOpened) {
              Servo.Open();
              gFilterOpened = true;
            }
            points.clear();
            corner_detection_time.start();
            cout << "Calibration Start" << endl;
            points = Corrector.DetectCornerPoints(img);
            cout << "Calibration End" << endl;
            corner_detection_time.end();
            // 尝试使用模板匹配进行目标检测
            if(points.size() == 0){
              template_matching_time.start();
              points = Corrector.DetectTargetAndCorners(img, gray_template, false);
              template_matching_time.end();
            }
            if (points.size() >= 8) {
              cout << "Calibration Success" << endl;
              UartHost.SendCalibrationStatus(true);
              
              if (UdpServerThread.mMode == 1) {
                // Mode 1 的逻辑保持不变
                fs_read.open(gCalibrationParamsFile, FileStorage::READ);
                if (!fs_read.isOpened()) std::cerr << "Failed to open read:" << gCalibrationParamsFile << std::endl;
                fs_read["StandardPoints"] >> standard_points;
                fs_read.release();
                H1 = findHomography(UdpServerThread.mInfraredPoints, standard_points);

                // save initial infrared points
                // save initial visible points
                // save H1
                fs_write.open(gCalibrationParamsFile, FileStorage::WRITE);
                if (!fs_write.isOpened()) std::cerr << "Failed to open write:" << gCalibrationParamsFile << std::endl;
                fs_write << "StandardPoints" << standard_points;
                fs_write << "InfraredPoints" << UdpServerThread.mInfraredPoints;
                fs_write << "VisiblePoints" << points;
                fs_write << "H1" << H1;
                fs_write.release();
              } else if (UdpServerThread.mMode == 3) {
                // 读取出厂标定信息
                fs_read.open(gCalibrationParamsFile, FileStorage::READ);
                if (!fs_read.isOpened()) std::cerr << "Failed to open read:" << gCalibrationParamsFile << std::endl;
                fs_read["StandardPoints"] >> standard_points;
                fs_read["InfraredPoints"] >> initial_infrared_points;
                fs_read["VisiblePoints"] >> initial_visible_points;
                fs_read.release();
                // 使用原来的方式计算 H2_inv
                if (points.size() >= 4 && standard_points.size() >= 4) {
                  // 3.0是RANSAC算法中的重投影阈值(reprojection threshold)
                  // 该阈值用于判断一个点是否为内点，如果重投影误差大于该阈值则认为是外点
                  // 阈值越小筛选越严格，这里选择3.0像素作为合理值
                  H2_inv = findHomography(points, standard_points, RANSAC, 3.0);
                  if(DEBUG){
                    // 将当前检测到的点投影到标准图上
                    std::vector<cv::Point2f> projected_points;
                    cv::perspectiveTransform(points, projected_points, H2_inv);

                    // 在template_img上绘制投影后的点
                    Mat display_img = template_img.clone();
                    for(const auto& pt : projected_points) {
                      circle(display_img, pt, 3, Scalar(0,0,255), -1);
                    }
                    
                    // 保存结果图像
                    imwrite("projected_points.jpg", display_img);
                  }
                } else {
                  cout << "Error: Not enough points for homography calculation. Need at least 4 pairs." << endl;
                  cout << "Current points: " << points.size() << ", standard points: " << standard_points.size() << endl;
                  break;
                }
                // Mode 3 使用特征匹配
                bool use_feature_match = false;
                if(use_feature_match){
                  cout << "Feature Matching Start" << endl;
                  H2_inv = Corrector.FeatureMatching(img, points);
                  if (!H2_inv.empty()) {
                    cout << "Feature Matching Success" << endl;
                  } else {
                    cout << "Feature Matching Failed, using original method" << endl;
                  }
                }
                H3 = findHomography(initial_infrared_points, initial_visible_points);
                std::vector<cv::Point2f> dst_pts(initial_visible_points.size());
                cv::perspectiveTransform(initial_visible_points, dst_pts, H2_inv);
                H1 = findHomography(initial_infrared_points, dst_pts);

                // 保存结果
                fs_write.open(gCalibrationParamsFile, FileStorage::WRITE);
                if (!fs_write.isOpened()) std::cerr << "Failed to open write:" << gCalibrationParamsFile << std::endl;
                fs_write << "StandardPoints" << standard_points;
                fs_write << "InfraredPoints" << initial_infrared_points;
                fs_write << "VisiblePoints" << initial_visible_points;
                fs_write << "H1" << H1;
                fs_write << "RunTimeVisiblePoints" << points;
                fs_write << "H2_inv" << H2_inv;
                fs_write.release();
                Perspective.RunTimeVisiblePoints = points;
              }
              cout << "H1: " << H1 << endl;
              UdpSendPoints.SendH(H1);
              // gRunMode = SwitchMode(eCalibration, gControlMode);
              UdpServerThread.mMode = 0;
              if (gFilterOpened) {
                Servo.Close();
                gFilterOpened = false;
              }
            }
            break;
          case ePerspective:
            if (!gDistCoeffsInited) {
              Calibration.InitDistCoeffs();
            }
            if (!gFilterOpened) {
              Servo.Open();
              gFilterOpened = true;
            }
            sleep(1);
            board_pts.clear();
            for (int row = 0; row < 6; row++) {
              for (int col = 0; col < 9; col++) {
                double x = col * 140 + 400;
                double y = row * 140 + 190;
                x = x * gWidth / 1920;
                y = y * gHeight / 1080;
                board_pts.push_back(Point2f(x, y));
              }
            }
            cout << "Perspective" << endl;
            Perspective.PM(img, board_pts, Calibration.mCameraMatrix, Calibration.mDistCoeffs);
            if (Perspective.mState) {
              cout << "Perspective Success" << endl;
              gRunMode = SwitchMode(ePerspective, gControlMode);
              if (gFilterOpened) {
                Servo.Close();
                gFilterOpened = false;
              }
            }
            break;
          case eRecognition:
            if (gFilterOpened) {
              Servo.Close();
              gFilterOpened = false;
            }
            if (!gHomographyInited) {
              Perspective.InitHomography();
            }
            // 使用cv::Rect定义矩形区域
            rect = cv::Rect(Perspective.xmin, Perspective.ymin, Perspective.w, Perspective.h);

            // 使用cv::Mat的构造函数来抠出矩形区域
            // resize(img, img, Size(1280, 720), INTER_CUBIC);
            sub_img = img(rect);
            pts.clear();
            Detection.mNeedReognition = true;
            Detection.Run(sub_img, pts);
            if (pts.size() > 0) {
              // if (gDistCoeffsInited)
              //   undistortPoints(pts, pts, Calibration.mCameraMatrix,
              //                   Calibration.mDistCoeffs);
              if (gHomographyInited) {
                for (auto &&pt : pts) {
                  pt.x+=Perspective.xmin;
                  pt.y+=Perspective.ymin;
                  Perspective.IPMpoint(pt, pt);
                  // circle(img, pt, (int)20, RED, 2);
                }
              }

              // pt_count++;
              cout << "frame: " << frame_count << " pt.size: " << pts.size() << " pt0:" << pts[0] << endl;
              UdpSendPoints.SendPoints(pts, 0);  // UDP
              // TcpSendThread.DivideGroup(pts); // TCP
            }
            // img_Path = "/home/pi/data/res/" + std::to_string(frame_count) + ".png";
            // imwrite(img_Path, img);
            break;
          default:
            if (!gFilterOpened) {
              Servo.Open();
              gFilterOpened = true;
            }
            break;
        }
        if (frame_count % frame_interval == 0) {
          Mat img_to_send = img.clone();
          if (!gHomographyInited) {
              Perspective.InitHomography();
          }
          if (gHomographyInited) {
            if (Perspective.RunTimeVisiblePoints.size() == 8) {
              // 计算最小外接矩形
              Rect min_rect = boundingRect(Perspective.RunTimeVisiblePoints);
              // 长宽各增加20像素
              min_rect.x -= 10;
              min_rect.y -= 20;
              min_rect.width += 20;
              min_rect.height += 20;

              // 确保矩形在图像范围内
              min_rect.x = max(0, min_rect.x);
              min_rect.y = max(0, min_rect.y);
              min_rect.width = min(min_rect.width, img.cols-1 - min_rect.x);
              min_rect.height = min(min_rect.height, img.rows-1 - min_rect.y);

              // 取最小外接正方形
              int square_size = max(min_rect.width, min_rect.height);
              square_size = min(square_size,min(img.cols-1,img.rows-1));
              min_rect.width = square_size;
              min_rect.height = square_size;

              // 确保正方形在图像范围内
              if (min_rect.x + square_size > img.cols-1) {
                min_rect.x = img.cols-1 - square_size;
              }
              if (min_rect.y + square_size > img.rows-1) {
                min_rect.y = img.rows-1 - square_size;
              }

              // 提取ROI并resize
              Mat roi = img(min_rect);
              resize(roi, img_to_send, Size(500, 500));
            }
          } else {
            // 从原图中心位置抠出500x500大小的区域
            int center_x = img.cols / 2;
            int center_y = img.rows / 2;
            int roi_size = 500;
            
            // 确保ROI不超出图像边界
            int x = max(0, center_x - roi_size/2);
            int y = max(0, center_y - roi_size/2);
            x = min(x, img.cols-1 - roi_size);
            y = min(y, img.rows-1 - roi_size);
            
            // 提取中心区域
            Rect center_roi(x, y, roi_size, roi_size);
            img_to_send = img(center_roi);
          }
          // imwrite("img_to_send.jpg", img_to_send);
          UdpSendThread.mat_que.push(img_to_send);
          UdpSendThread.mSemUdp.Post();
        }
      }
    }
  }
  destroyAllWindows();
  return 0;
}

int GetRunMode(int control_mode) {
  int runmode = gRunMode;
  switch (control_mode) {
    case 0:
      runmode = eRecognition;
      break;
    case 1:
    case 3:
      runmode = eCalibration;
      break;
    case 2:
    case 4:
      runmode = ePerspective;
      break;
    case 5:
      runmode = eDetection;
      break;
    default:
      break;
  }

  return runmode;
}

// Decide the mode to enter after `cur_mode` has finished (default:
// recognition) and apply the per-mode teardown side effects.
int SwitchMode(int cur_mode, int control_mode) {
  int next_mode = eRecognition;
  if (cur_mode == eCalibration) {
    if (control_mode == 1) {
      next_mode = ePerspective;  // command 1 chains calibration into perspective
    }
    gDistCoeffsInited = true;   // calibration has produced distortion coefficients
    Calibration.mMode = CLEAR;  // reset the calibrator state
  } else if (cur_mode == ePerspective) {
    Perspective.mState = false;  // clear the "perspective done" flag
  }
  return next_mode;
}

// (Re)initialize the configured frame source.
// eCamera: open the device and set the capture resolution if not yet open.
// eVideo: populate the playlist once. Previously the path was appended on
// EVERY call — and this runs inside main's outer while(true) — so gCameraStrs
// grew without bound and the same file replayed forever.
void SetImageSource(void) {
  switch (gSource) {
    case eCamera:
      if (!gInputCapture.isOpened()) {
        gInputCapture.open(gCameraID);
        // gInputCapture.set(CV_CAP_PROP_MODE,CV_CAP_MODE_GRAY);
        gInputCapture.set(CAP_PROP_FRAME_WIDTH, gWidth);
        gInputCapture.set(CAP_PROP_FRAME_HEIGHT, gHeight);
      }
      break;
    case eVideo:
      if (gCameraStrs.empty()) {
        // group 1
        gCameraStrs.push_back("/home/pi/data/Capture00001.avi");
        // gCameraStrs.push_back("/home/pi/data/Capture00002.avi");
        // gCameraStrs.push_back("/home/pi/data/Capture00003.avi");
      }
      break;
    default:
      break;
  }
}

// Fetch the next frame from the active source. Returns an empty Mat on
// failure and clears gImage_OK when the source is exhausted.
Mat GetImage(void) {
  Mat img;
  static size_t video_idx = 0;  // index of the next playlist entry (size_t: avoids signed/unsigned compare)

  // Open the next file in gCameraStrs and grab its first frame; clears
  // gImage_OK when the playlist is exhausted. (This logic was previously
  // duplicated in two branches below.)
  auto open_next_video = [&img]() {
    if (video_idx < gCameraStrs.size()) {
      gInputCapture.open(gCameraStrs[video_idx]);
      cout << gCameraStrs[video_idx] << endl;
      video_idx++;
      if (gInputCapture.isOpened()) gInputCapture >> img;
    } else {
      gImage_OK = false;
    }
  };

  switch (gSource) {
    case eCamera:
      if (gInputCapture.isOpened())
        gInputCapture >> img;
      else
        gImage_OK = false;
      break;
    case eVideo:
      if (gInputCapture.isOpened()) {
        gInputCapture >> img;
        if (img.empty()) open_next_video();  // current video finished
      } else {
        open_next_video();  // nothing open yet
      }
      break;
    default:
      break;
  }
  return img;
}

// Hardware lock: returns true only when eth0's MAC address, formatted as
// "xx-xx-xx-xx-xx-xx", equals the compiled-in lock string.
// Fixes vs. the original: the socket fd is now closed (was leaked on every
// call), the unused getifaddrs() call — whose result was also leaked — is
// removed, socket() failure is handled, and the interface-name copy and MAC
// formatting are bounds-checked (strncpy/snprintf instead of strcpy/sprintf).
bool CheckAddress(void) {
  // Query the MAC via SIOCGIFHWADDR; needs a socket. May not work on macOS.
  int fd = socket(AF_INET, SOCK_DGRAM, 0);
  if (fd < 0) {
    return false;  // cannot query interfaces at all
  }

  char local_mac[128] = {0};
  const std::string mac_lock = "DEFAULT_MAC_ADDRESS";

  struct ifreq ifr = {};
  strncpy(ifr.ifr_name, "eth0", sizeof(ifr.ifr_name) - 1);  // only need ethernet card
  ifr.ifr_name[sizeof(ifr.ifr_name) - 1] = '\0';
  if (0 == ioctl(fd, SIOCGIFHWADDR, &ifr)) {
    const unsigned char *mac = reinterpret_cast<const unsigned char *>(ifr.ifr_hwaddr.sa_data);
    snprintf(local_mac, sizeof(local_mac), "%02x-%02x-%02x-%02x-%02x-%02x", mac[0], mac[1], mac[2], mac[3], mac[4],
             mac[5]);
  }
  close(fd);  // was leaked before

  return local_mac == mac_lock;
}
