// C / POSIX system headers
#include <arpa/inet.h>
#include <fcntl.h>      // open, O_WRONLY|O_CREAT|O_TRUNC
#include <unistd.h>     // close

// C++ standard library
#include <cstdio>       // snprintf, perror
#include <cstring>      // strlen
#include <iostream>

// OpenCV
#include <opencv2/opencv.hpp>
#include <opencv2/tracking.hpp>
#include <opencv2/core/ocl.hpp>
#include "opencv2/objdetect.hpp"
#include "opencv2/videoio.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"

// Project headers
#include "ImageProcessingLib.hpp"
#include "HttpReq.h"

//using namespace std;
//using namespace cv;
//
//void Gaussian_Pyramid(Mat &image, vector<Mat> &pyramid_images, int level);
//void Laplaian_Pyramid(vector<Mat> &pyramid_images, Mat &image);
//void reconstuction(int level);
//
////static string faceData = "/home/monoliths/Documents/Project/OpenCV/TestHumenFaceReconous/cascades/haarcascade_frontalface_default.xml";
////
////bool faceDetected = false;
//// bool eye
//
//int imageDetectLoop()
//{
//
//    ocl::setUseOpenCL(false);
//    auto videoCapture = make_unique<VideoCapture>();
//    videoCapture->open(0);
//
//    auto faceCascade = make_unique<CascadeClassifier>(faceData);
//
//    // auto eyesCascade = make_unique<CascadeClassifier>(eyesData);
//    // auto upBodyCascade = make_unique<CascadeClassifier>(upBodyData);
//
//    auto faceTracker = TrackerKCF::create();
//    // auto eyesTracker = TrackerKCF::create();
//    // auto upBodyTracker = TrackerKCF::create();
//
//    auto key = waitKey(10);
//
//    while (videoCapture->isOpened())
//    {
//        Mat src;
//        if (videoCapture->read(src))
//        {
//            Mat gray;
//            cvtColor(src, gray, ColorConversionCodes::COLOR_BGR2GRAY);
//
//            std::vector<Rect> facesPostion;
//            faceCascade->detectMultiScale(gray, facesPostion, 1.3, 5);
//
//            // std::vector<Rect> eyesPosition;
//            // eyesCascade->detectMultiScale(gray, eyesPosition, 1.3, 5);
//            // std::vector<Rect> upBodyPosition;
//            // upBodyCascade->detectMultiScale(gray, upBodyPosition, 1.3, 5);
//
//            Rect face;
//            if (!faceDetected && !facesPostion.empty())
//            {
//                faceTracker->init(src, facesPostion[0]);
//                faceDetected = true;
//            }
//
//            if (faceDetected && faceTracker->update(src, face))
//            {
//                rectangle(src, face, Scalar(0, 0, 255), 2);
//            }
//
//            // for (auto iface : facesPostion)
//            // {
//            //     rectangle(src, iface, Scalar(0, 0, 255), 2);
//            // }
//
//            // for (auto ieye : eyesPosition)
//            // {
//            //     rectangle(src, ieye, Scalar(255, 0, 0), 2);
//            // }
//
//            // for (auto iupBody : upBodyPosition)
//            // {
//            //     rectangle(src, iupBody, Scalar(0, 255, 0), 2);
//            // }
//
//            if (key == 27)
//            {
//                break;
//            }
//            else if (key == 32)
//            {
//                imwrite("Capture.jpg", src);
//                cout << "take photo" << endl;
//            }
//
//            imshow("OpenCV", src);
//        }
//        auto key = waitKey(10);
//    }
//
//    return 0;
//}


//#include "HttpStreamReceive.h"
//
//cv::Mat char_to_mat(char *image_buf, int image_len){
//    cv::Mat mat;
//    std::string str2(image_buf, image_len);
//    std::vector<char> vec_data1(str2.c_str(), str2.c_str() + str2.size());
//    mat = cv::imdecode(vec_data1, -1).clone();
//    return mat;
//}
//
//
//static std::string faceData = "/home/monoliths/Documents/Project/OpenCV/TestHumenFaceReconous/cascades/haarcascade_frontalface_default.xml";
//
//bool faceDetected = false;
//
//
//auto faceCascade = new cv::CascadeClassifier(faceData);
//
//auto faceTracker = cv::TrackerKCF::create();
//
//bool callback(char* data_, int length_)
//{
//    if (length_ > 100)
//    {
////        cv::Mat mat(1280, 720, 16);
////        cv::Mat mat(1280, 720, 16, (void*) data_);
////        cv::imshow("window", mat);
//        //读取
////        fIn.read((char*)YUVFrameBuff, size*sizeof(unsigned char));
////        memcpy(mat.data,reinterpret_cast<unsigned char *>(data_), length_ * sizeof(unsigned char));
////        cv::Mat bgrImg;
////        cv::cvtColor(mat, bgrImg, cv::COLOR_RGB2BGR);
////        cv::namedWindow("bgrImg");
//        auto res = char_to_mat(data_, length_);
//        cv::Mat gray;
//        cvtColor(res, gray, cv::ColorConversionCodes::COLOR_BGR2GRAY);
//
//        std::vector<cv::Rect> facesPostion;
//        faceCascade->detectMultiScale(gray, facesPostion, 1.3, 5);
//
//        // std::vector<Rect> eyesPosition;
//        // eyesCascade->detectMultiScale(gray, eyesPosition, 1.3, 5);
//        // std::vector<Rect> upBodyPosition;
//        // upBodyCascade->detectMultiScale(gray, upBodyPosition, 1.3, 5);
//
//        cv::Rect face;
//        if (!faceDetected && !facesPostion.empty())
//        {
//            faceTracker->init(res, facesPostion[0]);
//            faceDetected = true;
//        }
//
//        if (faceDetected && faceTracker->update(res, face))
//        {
//            rectangle(res, face, cv::Scalar(0, 0, 255), 2);
//        }
//
//        cv::imshow("bgrImg", res);
//        cv::waitKey(10);
//
////    }else {
////        std::cout << "err" << std::endl;
//    }
//    return true;
//}

/**
 * Entry point: issues a raw HTTP/1.1 GET request to the camera host
 * (192.168.4.1:81 — presumably an ESP32-CAM style MJPEG stream endpoint;
 * confirm against the device) and dumps the raw response bytes into a
 * local file named "log" via connect_alipay_fd().
 *
 * @return 0 on success, 1 if the log file cannot be created.
 */
int main()
{
    // Force the CPU code path in OpenCV; avoids OpenCL driver flakiness.
    cv::ocl::setUseOpenCL(false);

    const char *host = "192.168.4.1";
    const int port = 81;

    // Create/truncate the file that receives the raw HTTP response.
    // The original code ignored a failed open() and went on to use fd == -1.
    int fd = open("log", O_WRONLY | O_CREAT | O_TRUNC, 0664);
    if (fd < 0)
    {
        perror("open log");
        return 1;
    }

    // Build the request header. snprintf (unlike the previous sprintf)
    // cannot overrun the buffer. Adjacent string literals concatenate,
    // so the request bytes are identical to the original single literal.
    char sendData[40960] = {0};
    snprintf(sendData, sizeof(sendData),
             "GET / HTTP/1.1\r\n"
             "Accept: */*\r\n"
             "Accept-Language: zh-Hans-CN, zh-Hans; q=0.8, en-US; q=0.5, en; q=0.3\r\n"
             "Connection: close\r\n"
             "Host: %s\r\n"
             "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
             "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 "
             "Safari/537.36 Edge/17.17134\r\n"
             "\r\n",
             host);

    // Send the request; the helper streams the response straight into fd.
    int recv_len = connect_alipay_fd(host, port, sendData, strlen(sendData), fd);
    close(fd);

    std::cout << "received " << recv_len << " bytes, response written to ./log"
              << std::endl;

    return 0;
}
