#include "solar_system_stack.h"

#define _USE_MATH_DEFINES

#include <algorithm>

#include "elog.h"
#include "elog_file.h"
#include "timer_utils.h"

using namespace std;
using namespace cv;

bool overlayimage(Mat img1, Mat img2, Mat& output, float sharpness) {
    // Blends img2 onto img1 after aligning it with an ECC homography,
    // then applies a sharpening kernel scaled by `sharpness`.
    //
    // img1, img2  same-size BGR (3-channel) frames; img2 is warped onto img1.
    // output      blended + sharpened result (CV_8UC3).
    // sharpness   multiplier applied to the 3x3 sharpen kernel.
    //
    // Returns false on empty/mismatched inputs or if ECC fails to converge.
    if (img1.empty() || img2.empty() || img1.size() != img2.size()) {
        return false;
    }

    Mat gray_img1, gray_img2;
    cvtColor(img1, gray_img1, COLOR_BGR2GRAY);
    cvtColor(img2, gray_img2, COLOR_BGR2GRAY);

    Mat H = Mat::eye(3, 3, CV_32F);
    try {
        // findTransformECC throws cv::Exception when it does not converge;
        // treat that as a soft failure consistent with the bool contract.
        findTransformECC(gray_img2, gray_img1, H, MOTION_HOMOGRAPHY);
    } catch (cv::Exception& e) {
        printf("OpenCV exception caught:%s\n", e.what());
        return false;
    }

    // Warp an all-white image with the same transform so we know exactly
    // which output pixels received valid data from img2.
    Mat white_img(img1.size(), CV_8UC1, Scalar(255)), white_mask;
    warpPerspective(white_img, white_mask, H, img1.size(), INTER_NEAREST, BORDER_CONSTANT, Scalar(0));

    Mat img2transform;
    warpPerspective(img2, img2transform, H, img1.size());

    Mat t_output = img1.clone();
    for (int row = 0; row < img1.rows; row++)
    {
        for (int col = 0; col < img1.cols; col++)
        {
            if (white_mask.at<unsigned char>(row, col)) {
                // BUG FIX: these images are 3-channel BGR, but the previous
                // code indexed them with at<unsigned char> (single-channel
                // addressing), so only about a third of the pixel data was
                // blended and the addressing was wrong. Blend per channel.
                const Vec3b& p1 = img1.at<Vec3b>(row, col);
                const Vec3b& p2 = img2transform.at<Vec3b>(row, col);
                Vec3b& dst = t_output.at<Vec3b>(row, col);
                for (int c = 0; c < 3; c++) {
                    dst[c] = saturate_cast<unsigned char>(p1[c] * 0.5f + p2[c] * 0.5f);
                }
            }
        }
    }

    cv::Mat sharpen_kernel = (cv::Mat_<float>(3, 3) <<
        -1, -1, -1,
        -1, 9, -1,
        -1, -1, -1);

    sharpen_kernel *= sharpness;
    filter2D(t_output, t_output, CV_8U, sharpen_kernel);

    output = t_output;

    return true;
}

bool overlayimage(vector<Mat> imgs, Mat& output, float sharpness) {
    // Stacks (averages) a sequence of same-size BGR frames: every frame is
    // aligned to imgs[0] via an ECC homography, accumulated in float, then
    // averaged and sharpened.
    //
    // imgs       input frames; imgs[0] is the alignment reference.
    // output     averaged + sharpened result (CV_8UC3).
    // sharpness  multiplier applied to the 3x3 sharpen kernel.
    //
    // Returns false when fewer than two frames are given or alignment fails.
    if (imgs.size() < 2) {
        return false;
    }

    vector<Mat> gray_imgs(imgs.size());
    for (size_t i = 0; i < imgs.size(); i++)
    {
        cvtColor(imgs[i], gray_imgs[i], COLOR_BGR2GRAY);
    }

    // Accumulate in 32-bit float to avoid 8-bit overflow while summing.
    Mat sum_mat(imgs[0].size(), CV_32FC3, Scalar(0, 0, 0));

    // The reference frame contributes everywhere (no warp, no mask).
    {
        Mat floatimg;
        imgs[0].convertTo(floatimg, CV_32FC3);
        cv::add(sum_mat, floatimg, sum_mat);
    }

    for (size_t i = 1; i < imgs.size(); i++)
    {
        Mat H = Mat::eye(3, 3, CV_32F);
        try {
            // Throws cv::Exception when ECC does not converge; honor the
            // bool failure contract instead of letting it propagate.
            findTransformECC(gray_imgs[i], gray_imgs[0], H, MOTION_HOMOGRAPHY);
        } catch (cv::Exception& e) {
            printf("OpenCV exception caught:%s\n", e.what());
            return false;
        }

        // Warp an all-white image alongside the frame so we know which
        // destination pixels actually received data from this frame.
        Mat white_img(imgs[0].size(), CV_8UC1, Scalar(255)), white_mask;
        warpPerspective(white_img, white_mask, H, imgs[0].size(), INTER_NEAREST, BORDER_CONSTANT, Scalar(0));

        Mat img2transform;
        warpPerspective(imgs[i], img2transform, H, imgs[0].size());

        img2transform.convertTo(img2transform, CV_32FC3);
        cv::add(sum_mat, img2transform, sum_mat, white_mask);
    }

    // NOTE(review): border pixels masked out of some frames are still divided
    // by imgs.size(), so partially-covered regions come out darker than the
    // fully-covered area — confirm this dimming is acceptable.
    sum_mat /= imgs.size();
    sum_mat.convertTo(sum_mat, CV_8UC3);

    cv::Mat sharpen_kernel = (cv::Mat_<float>(3, 3) <<
        -1, -1, -1,
        -1, 9, -1,
        -1, -1, -1);

    sharpen_kernel *= sharpness;

    filter2D(sum_mat, sum_mat, CV_8U, sharpen_kernel);
    output = sum_mat;
    return true;
}

int recognizeSunAndMoon(const Mat& img, Rect& target_bounding_box) {
    Mat img_gray;
    cvtColor(img, img_gray, COLOR_BGR2GRAY);
    img_gray.convertTo(img_gray, CV_8UC1, 1 / 256.0, 0);
    // imwrite("/userdata/moon/img_gray.png", img_gray);

    Mat dstHist;
	float hranges[] ={0, 256};			// 取值范围[0,256)
	const float *ranges[] = {hranges};
	int size = 256;
	int channels = 0;

    double maxVal = 0;
	Point maxLoc;

    //计算图像的直方图
	// calcHist(&img_gray, 1, &channels, Mat(), dstHist, 1, &size, ranges);
	// minMaxLoc(dstHist, NULL, &maxVal, NULL, &maxLoc);
	// double threshold = maxLoc.y;
    cv::Scalar mean;  
    cv::Scalar dev;
	cv::meanStdDev(img_gray, mean, dev);
	double threshold = (mean.val[0] + 1.0 * dev.val[0]) > 254 ? 254 : (unsigned int)(mean.val[0] + 1.0 * dev.val[0]);

    cv::threshold(img_gray, img_gray, threshold, 0, THRESH_TOZERO);
    // imwrite("/userdata/moon/moon_threshold.png", img_gray);

	vector<vector<Point>> contours;
	findContours(img_gray, contours, noArray(), RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);

	// Mat imgNosieContour = Mat::zeros(img_gray.rows, img_gray.cols, CV_8UC3);
	// drawContours(imgNosieContour, contours, -1, Scalar(0,0,255));
    // imwrite("/userdata/moon/contour.png", imgNosieContour);

    double area_max = 0;
	for(int i = 0; i < contours.size(); i++) {
        double area = contourArea(contours[i]);
		if (area  > area_max) {
            area_max = area;
            target_bounding_box = boundingRect(contours[i]);
        }
	}
    
    return 0;
}

int overlayimage(Mat& img_primary, const Mat& img_new, Mat& img_stack, Mat& img_stack_all, Mat& normal_counter, float sharpness) {
    // Incremental stacking: aligns img_new to the cached primary frame's
    // target (sun/moon disc) with a Euclidean ECC transform, accumulates it
    // into img_stack_all / normal_counter, and emits the running per-pixel
    // average (sharpened) in img_stack.
    //
    // img_primary     in/out: reference frame; its target crop is cached in
    //                 function statics and the Mat is released after use.
    // img_new         in: next frame to stack; the raw-pointer loop below
    //                 assumes CV_16UC3 data — TODO confirm at the caller.
    // img_stack       out: current averaged result (CV_16UC3).
    // img_stack_all   in/out: per-pixel float accumulator (assumed CV_32FC3).
    // normal_counter  in/out: per-pixel sample counter (assumed CV_16UC3).
    // sharpness       multiplier applied to the 3x3 sharpen kernel.
    //
    // Returns 0 on success; -1 bad inputs, -2 primary crop failed,
    // -3 new-frame target out of bounds / crop failed, -4 alignment failed.
    //
    // NOTE(review): the cached rect/gray crop are function-level statics, so
    // this function is stateful and not reentrant / thread-safe.
    if (img_new.empty())
        return -1;

    if (img_stack_all.empty())
        return -1;

    if (normal_counter.empty())
        return -1;

    static Rect img_primary_target_rect;
    static Mat img_primary_target_gray;
    if (!img_primary.empty()) {
        recognizeSunAndMoon(img_primary, img_primary_target_rect);
        log_i("img_primary_target_rect.x:%d, img_primary_target_rect.y:%d, img_primary_target_rect.width:%d, img_primary_target_rect.height:%d\n", img_primary_target_rect.x, img_primary_target_rect.y, img_primary_target_rect.width, img_primary_target_rect.height);
        try {
            Mat img_primary_target = img_primary(img_primary_target_rect);
            cvtColor(img_primary_target, img_primary_target_gray, COLOR_BGR2GRAY);
            img_primary_target_gray.convertTo(img_primary_target_gray, CV_8UC1, 1.0 / 256);
            img_primary.release();
        } catch (cv::Exception& e) {
            const char* err_msg = e.what();
            printf("OpenCV exception caught:%s\n", err_msg);
            return -2;
        }
    }

    // Find the target in the new frame, then re-center a rect of the
    // primary target's size on it so both ECC inputs have equal dimensions.
    Rect img_new_target_rect_directly;
    recognizeSunAndMoon(img_new, img_new_target_rect_directly);
    log_i("img_new_target_rect_directly:(%d, %d, %d, %d)\n", img_new_target_rect_directly.x, img_new_target_rect_directly.y, img_new_target_rect_directly.width, img_new_target_rect_directly.height);

    Point img_new_target_rect_directly_center(img_new_target_rect_directly.x + img_new_target_rect_directly.width / 2, img_new_target_rect_directly.y + img_new_target_rect_directly.height / 2);

    Rect img_new_target_rect;
    img_new_target_rect.x = img_new_target_rect_directly_center.x - img_primary_target_rect.width / 2;
    img_new_target_rect.y = img_new_target_rect_directly_center.y - img_primary_target_rect.height / 2;
    img_new_target_rect.width = img_primary_target_rect.width;
    img_new_target_rect.height = img_primary_target_rect.height;

    log_i("img_new_target_rect:(%d, %d, %d, %d)\n", img_new_target_rect.x, img_new_target_rect.y, img_new_target_rect.width, img_new_target_rect.height);

    // Reject frames whose re-centered target rect falls outside the image.
    if (img_new_target_rect.x < 0 || img_new_target_rect.x + img_new_target_rect.width > img_new.cols)
        return -3;
    if (img_new_target_rect.y < 0 || img_new_target_rect.y + img_new_target_rect.height > img_new.rows)
        return -3;

    Mat img_new_target_gray;
    try {
        Mat img_new_target = img_new(img_new_target_rect);
        cvtColor(img_new_target, img_new_target_gray, COLOR_BGR2GRAY);
        img_new_target_gray.convertTo(img_new_target_gray, CV_8UC1, 1.0 / 256);
    } catch (cv::Exception& e) {
        const char* err_msg = e.what();
        log_i("OpenCV exception caught:%s\n", err_msg);
        return -3;
    }

    try {
        Mat H = Mat::eye(2, 3, CV_32F);
        // findTransformECC only accepts 8UC1 or 32FC1 inputs; anything else
        // throws. An exception thrown on a worker thread is not printed and
        // silently kills the thread, which is hard to debug — hence the
        // try/catch around this whole section.
        double error = findTransformECC(img_new_target_gray, img_primary_target_gray, H, MOTION_EUCLIDEAN);

        // Shift the rotation center of the crop-level transform so it can
        // be applied to the full image: t' = t + (I - R) * c.
        H.at<float>(0, 2) += (1 - H.at<float>(0, 0)) * img_primary_target_rect.x + H.at<float>(1, 0) * img_primary_target_rect.y;
        H.at<float>(1, 2) += (1 - H.at<float>(0, 0)) * img_primary_target_rect.y - H.at<float>(1, 0) * img_primary_target_rect.x;

        // Add the offset between the two crops, then apply to the full image.
        H.at<float>(0, 2) += img_primary_target_rect.x - img_new_target_rect.x;
        H.at<float>(1, 2) += img_primary_target_rect.y - img_new_target_rect.y;

        Mat img_new_transform;
        warpAffine(img_new, img_new_transform, H, img_new.size());

        img_stack = cv::Mat::zeros(img_new.rows, img_new.cols, CV_16UC3);

        float* img_stack_all_ptr = reinterpret_cast<float*>(img_stack_all.data);
        uint16_t* img_stack_ptr = reinterpret_cast<uint16_t*>(img_stack.data);
        uint16_t* normal_counter_ptr = reinterpret_cast<uint16_t*>(normal_counter.data);
        uint16_t* img_new_transform_ptr = reinterpret_cast<uint16_t*>(img_new_transform.data);

        // All four mats are assumed to share rows/cols/channels — TODO confirm.
        size_t data_size = img_stack_all.cols * img_stack_all.rows * img_stack_all.channels();
        for (size_t i = 0; i < data_size; ++i) {
            img_stack_all_ptr[i] += img_new_transform_ptr[i];
            // Only count samples that actually carry data (warp border is 0).
            normal_counter_ptr[i] += img_new_transform_ptr[i] > 0 ? 1 : 0;
            // BUG FIX: guard against division by zero for pixels that have
            // never received data — float inf/NaN converted to uint16_t is
            // undefined behavior.
            img_stack_ptr[i] = normal_counter_ptr[i] > 0
                ? static_cast<uint16_t>(img_stack_all_ptr[i] / normal_counter_ptr[i])
                : 0;
        }

        cv::Mat sharpen_kernel = (cv::Mat_<float>(3, 3) <<
            -1, -1, -1,
            -1, 9, -1,
            -1, -1, -1);

        sharpen_kernel *= sharpness;

        filter2D(img_stack, img_stack, -1, sharpen_kernel);

    } catch (cv::Exception& e) {
        const char* err_msg = e.what();
        log_i("OpenCV exception caught:%s\n", err_msg);
        return -4;
    }

    return 0;
}