#include "mainfunction.h"
#include "ui_mainfunction.h"

#include <QImage>
#include <QFileDialog>
#include "app.h"
#include "FaceAction.h"
#include <QMessageBox>
#include <iostream>
#include <seeta/Struct.h>

using namespace std;

using cv::VideoCapture;
using cv::imread;
using cv::Mat;
using cv::waitKey;

using seeta::ImageData;
using seeta::QualityLevel;


// Construct the main window and instantiate the Designer-generated UI.
MainFunction::MainFunction (QWidget *parent)
	: QMainWindow (parent), ui (new Ui::MainFunction)
{
	ui->setupUi (this);
}

// Destructor: releases the Designer-generated UI object owned by this window.
MainFunction::~MainFunction()
{
	delete ui;
}

void
MainFunction::on_pushButton_clicked()
{
	QLabel *img_frame = ui->left_img_frame;
	QImage img (QFileDialog::getOpenFileName (this, "选择人脸图片"));
	if (!img.isNull())
		img_frame->setPixmap (QPixmap::fromImage (img.scaled (img_frame->size())));
}


void
MainFunction::on_pushButton_2_clicked()
{
	QLabel *img_frame = ui->right_img_frame;
	QImage img (QFileDialog::getOpenFileName (this, "选择人脸图片"));
	if (!img.isNull())
		img_frame->setPixmap (QPixmap::fromImage (img.scaled (img_frame->size())));
}

void
MainFunction::on_pushButton_3_clicked()
{
	if (ui->left_img_frame->pixmap().isNull() || ui->right_img_frame->pixmap().isNull())
		return;
	using namespace YQ;
	char str[100];
	constexpr float threshold = 0.8;
	try {
		shared_ptr<FaceExtractor> extractor = make_shared<FaceExtractor>();
		shared_ptr<ImageData>img_1 = make_shared<ImageData> (toImageData (toMat (
		                                 ui->left_img_frame->pixmap().toImage())));
		shared_ptr<ImageData>img_2 = make_shared<ImageData> (toImageData (toMat (
		                                 ui->right_img_frame->pixmap().toImage())));
		extractor->setImage (img_1);
		shared_ptr<ExtractResult> result_1 = extractor->extractBiggestFaceInfo();
		extractor->setImage (img_2);
		shared_ptr<ExtractResult>result_2 = extractor->extractBiggestFaceInfo();
		
		float similarity = getFaceSimilarity (result_1, result_2, extractor);
		snprintf (str, sizeof (str), "%s, 相似度为：%.3f",
		          similarity > threshold ? "是同一个人" : "不是同一个人", similarity);
	} catch (runtime_error e) {
		snprintf (str, sizeof (str), e.what());
	}
	QFont font = ui->compare_result->font();
	font.setPointSize (16);
	ui->compare_result->setFont (font);
	ui->compare_result->setText (str);
}


// Slot: open the default camera and run a live face-detection loop, drawing
// the landmark overlays selected by the UI toggles onto each frame. The
// loop exits when the camera stops delivering frames — e.g. after
// on_stop_camera_clicked() releases it.
// NOTE(review): this loop runs on the GUI thread and blocks the Qt event
// loop while the camera is open; moving it to a worker/QTimer would be a
// larger (behavior-changing) refactor.
void
MainFunction::on_start_camera_clicked()
{
	const string model_path = "models/";

	this->camera.open (0);
	if (!camera.isOpened()) {
		// Was QMessageBox::question (shows Yes/No buttons — wrong for an
		// error); also bail out instead of entering the loop on a dead camera.
		QMessageBox::critical (this, "错误", "摄像头打开失败!");
		return;
	}
	FaceAction actioner (model_path);
	for (;;) {
		Mat tmp;
		camera >> tmp;
		if (tmp.empty())
			break;	// camera released or stream ended
		this->current_img = tmp;
		clock_t time_beg = clock();
		cv::flip (current_img, current_img, 1);	// mirror for a selfie-style view
		Mat img_showed = current_img.clone();
		ImageData seeta_img = YQ::toImageData (current_img);
		SeetaFaceInfoArray face_array = actioner.FD->detect (seeta_img);
		waitKey (1);	// let OpenCV pump its internal event queue
		SeetaRect face;
		for (int i = 0; i < face_array.size ; ++i) {
			face = face_array.data[i].pos;
			actioner.init_Mat (img_showed, face);
			actioner.setPoints68();
			actioner.setPoints5();

			if (this->use_5_points)
				img_showed = actioner.drawPoints (5, img_showed, true);

			if (this->use_68_points)
				img_showed = actioner.drawPoints (68, img_showed, true);

			if (this->show_iris)
				img_showed = actioner.showEyeCenter (img_showed);
		}
		// NOTE(review): clock() measures CPU time, not wall time, so the FPS
		// figure is approximate. Guard against a zero-tick frame, which
		// previously produced a divide-by-zero (inf fps).
		double duration = (double) (clock() - time_beg) / CLOCKS_PER_SEC;
		double fps = duration > 0 ? 1 / duration : 0;
		std::string fpsShow = "fps : " + std::to_string (fps);
		cv::putText (img_showed, fpsShow, cv::Point (40, 40), cv::FONT_HERSHEY_COMPLEX, 0.5, CV_RGB (255, 0,
		             242), 1);
		ui->camera_frame->setPixmap (QPixmap::fromImage (YQ::toQImage (img_showed).scaled (
		                                 ui->camera_frame->size())));
	}
}

void
MainFunction::on_toggle_use_5_points_toggled (bool checked)
{
	this->use_5_points = checked;
}


void
MainFunction::on_toggle_use_68_points_toggled (bool checked)
{
	this->use_68_points = checked;
}


void
MainFunction::on_toggle_show_iris_toggled (bool checked)
{
	this->show_iris = checked;
}

// Map a SeetaFace quality level to its Chinese display label
// (优 = good, 良 = fair, 差 = poor, 未知 = unknown).
static string
toString (QualityLevel level)
{
	switch (level) {
		case QualityLevel::HIGH:
			return "优";
		case QualityLevel::MEDIUM:
			return "良";
		case QualityLevel::LOW:
			return "差";
		default:
			return "未知";
	}
}

void
MainFunction::on_stop_camera_clicked()
{
	this->camera.release();
	if (this->current_img.empty())
		return;
	shared_ptr<ImageData>img = make_shared<ImageData> (YQ::toImageData (this->current_img));
	shared_ptr<YQ::FaceExtractor> extracter = make_shared<YQ::FaceExtractor>();
	extracter->setImage (img);
	ui->detect_data->clear();
	ui->detect_data->setFontPointSize (16);
	try {
		shared_ptr<YQ::ExtractResult> extract_result = extracter->extractBiggestFaceInfo();
		YQ::QualityChecker checker (img, extract_result);
		stringstream ss;
		ss << extract_result->face_info.score;
		ui->detect_data->append ( ("置信度:\t\t" + ss.str()).c_str());
		ui->detect_data->append ( ("亮度:\t\t" + toString (checker.getBrightnessQuality().level)).c_str());
		ui->detect_data->append ( ("清晰度:\t\t" + toString (checker.getClarityQuality().level)).c_str());
		ui->detect_data->append ( ("姿态:\t\t" + toString (checker.getPoseQuality().level)).c_str());
		ui->detect_data->append ( ("面部完整性:\t" + toString (checker.getIntergrityQuality().level)).c_str());
		ui->detect_data->append ( ("分辨率:\t\t" + toString (checker.getResolutionQuality().level)).c_str());
	} catch (runtime_error e) {
		ui->detect_data->setText ("图中无人脸");
	}
}











