#include "face_api_helper.h"

#include <stdlib.h>

#include <cmath>
#include <iostream>
#include <mutex>
#include <string>
#include <vector>

#include "json/json.h"
#include "setting.h"

using namespace std;

// Adjust the face-detection strategy by tuning the corresponding parameters of the Setting class.
// Push every tunable face-detection parameter held by Setting into the API
// instance.
// Fix: use an automatic (stack) Setting instead of new/delete — the heap
// allocation was unnecessary and would leak if any setter threw.
void face_setting(BaiduFaceApi* api)
{
    Setting setting;
    setting.set_blur_thr(api);
    setting.set_detect_in_video_interval(api);
    setting.set_eulur_angle_thr(api);
    setting.set_illum_thr(api);
    setting.set_is_check_quality(api);
    // Minimum detectable face size.
    setting.set_min_face_size(api);
    setting.set_notface_thr(api);
    setting.set_occlu_thr(api);
    setting.set_track_by_detection_interval(api);
}


//FaceApiHelper* FaceApiHelper::instance = NULL;
// Allocate the underlying Baidu SDK wrapper.
// NOTE(review): `api` is never deleted anywhere in this file — presumably the
// helper lives for the whole process (the commented-out `instance` above
// suggests a singleton); confirm there is no destructor elsewhere.
FaceApiHelper::FaceApiHelper()
{
	api = new BaiduFaceApi();
}


// Copy construction is not meaningful for this wrapper (the commented-out
// `instance` above suggests a singleton design).
// Fix: the original empty body left the `api` member uninitialized, so any
// copied object carried a wild pointer. Initialize it to nullptr so misuse
// fails deterministically instead of dereferencing garbage.
FaceApiHelper::FaceApiHelper(const FaceApiHelper&)
{
	api = nullptr;
}


// Assignment is a deliberate no-op (singleton-style wrapper).
// Fix: the original body had no return statement — flowing off the end of a
// value-returning function is undefined behavior in C++.
FaceApiHelper& FaceApiHelper::operator=(const FaceApiHelper& )
{
	return *this;
}


// Initialize the Baidu face SDK and apply the detection settings.
// is_card: forwarded verbatim to BaiduFaceApi::sdk_init (meaning defined by
// the SDK — presumably ID-card mode; confirm against the SDK docs).
// Returns the SDK's init result code. Serialized by api_mt.
int FaceApiHelper::sdk_init(bool is_card)
{
	std::cout << "FaceApiHelper sdk init begin" << std::endl;
    std::lock_guard<std::mutex> lck(api_mt); 

	int ret = api->sdk_init(is_card);
    face_setting(api); // Original warning (translated): "this setting must never be
                       // enabled, otherwise the feature-extraction interface returns 512
                       // only on the first call and 0 on subsequent calls" — yet the call
                       // IS active here. NOTE(review): reconcile the comment with the call.

	std::cout << "FaceApiHelper sdk init end" << std::endl;
  
	return ret;
}


// Report whether the SDK license/authorization check passed.
// NOTE(review): unlike the other api-> calls in this file, this one does not
// take api_mt — confirm the SDK call is safe without the lock.
bool FaceApiHelper::is_auth()
{
	return api->is_auth();
}

// Run face attribute analysis (age, expression, gender, glasses, race) on a
// base64-encoded image (type 1 = base64 input).
// Throws JsonFormatException when the SDK response is not valid JSON, and
// ApiReturnError when the SDK reports a non-zero errno.
// Serialized by api_mt.
face_attr_t FaceApiHelper::face_attr_by_base64(const string& img_base64)
{
	std::lock_guard<std::mutex> lck(api_mt);
	string res = api->face_attr(img_base64.c_str(),1);
	std::cout << res << std::endl;

	Json::Reader reader;
	Json::Value json_obj;
	if (!reader.parse(res.c_str(), json_obj)) {
		throw JsonFormatException(res);
	}

	int err = json_obj["errno"].asInt();
	string msg = json_obj["msg"].asString();
	if (err != 0) {
		throw ApiReturnError(msg);
	}

	// Hoist the repeated ["data"]["result"] lookup; every field below comes
	// from this sub-object. Also dropped the stray `.c_str()` on the age
	// field — stof takes a std::string, so the original built a needless
	// temporary and was inconsistent with the other conversions.
	const Json::Value& result = json_obj["data"]["result"];

	face_attr_t attr;
	attr.age = stof(result["age"].asString());
	attr.expression = stoi(result["expression"].asString());
	attr.expression_conf = stof(result["expression_conf"].asString());
	attr.gender = stoi(result["gender"].asString());
	attr.gender_conf = stof(result["gender_conf"].asString());
	attr.glass = stoi(result["glass"].asString());
	attr.glass_conf = stof(result["glass_conf"].asString());
	attr.race = stoi(result["race"].asString());
	attr.race_conf = stof(result["race_conf"].asString());

	return attr;
}


// Run face quality analysis (blur, illumination, per-region occlusion) on a
// base64-encoded image (type 1 = base64 input).
// Throws JsonFormatException when the SDK response is not valid JSON, and
// ApiReturnError when the SDK reports a non-zero errno.
// Serialized by api_mt.
face_quality_t FaceApiHelper::face_quality_by_base64(const string& img_base64)
{
	std::lock_guard<std::mutex> lck(api_mt);

	string res = api->face_quality(img_base64.c_str(),1);
	std::cout << "face quality:" << res << std::endl;

	Json::Reader reader;
	Json::Value json_obj;
	if (!reader.parse(res.c_str(), json_obj)) {
		throw JsonFormatException(res);
	}

	int err = json_obj["errno"].asInt();
	string msg = json_obj["msg"].asString();
	if (err != 0) {
		throw ApiReturnError(msg);
	}

	// Hoist the repeated ["data"]["result"] lookup; every field below comes
	// from this sub-object (matches face_attr_by_base64).
	const Json::Value& result = json_obj["data"]["result"];

	face_quality_t quality;
	quality.bluriness = stof(result["bluriness"].asString());
	quality.illum = stoi(result["illum"].asString());
	quality.occl_chin = stof(result["occl_chin"].asString());
	quality.occl_l_contour = stof(result["occl_l_contour"].asString());
	quality.occl_l_eye = stof(result["occl_l_eye"].asString());
	quality.occl_mouth = stof(result["occl_mouth"].asString());
	quality.occl_nose = stof(result["occl_nose"].asString());
	quality.occl_r_contour = stof(result["occl_r_contour"].asString());
	quality.occl_r_eye = stof(result["occl_r_eye"].asString());

	return quality;
}


// Run face tracking on a decoded OpenCV frame.
// out: result list filled/owned per the SDK's contract — presumably SDK-owned;
//      confirm ownership against the BaiduFaceApi docs.
// maxTrackObjNum: upper bound on simultaneously tracked faces.
// Returns the SDK's status/count code. Serialized by api_mt.
int FaceApiHelper::track(std::vector<TrackFaceInfo> *& out, const cv::Mat &mat, int maxTrackObjNum)
{

	std::lock_guard<std::mutex> lck(api_mt);
	return api->track(out, mat, maxTrackObjNum);
}


// Run face tracking on a base64-encoded image (type 1 = base64 input).
// Thin locked wrapper over the SDK's string-based track overload.
int FaceApiHelper::track_by_base64(std::vector<TrackFaceInfo>* & out,const string& image, int maxTrackObjNum)
{
	std::lock_guard<std::mutex> lck(api_mt);
    return api->track(out,image.c_str(),1,maxTrackObjNum);

}

// Fetch the faces currently held by the SDK's internal tracker state.
// Serialized by api_mt; returns the SDK's status/count code.
int FaceApiHelper::get_tracked_faces(std::vector<TrackFaceInfo> *& out)
{
	std::lock_guard<std::mutex> lck(api_mt);
	return api->get_tracked_faces(out);
}


 // Extract the face feature vector from an image file on disk
 // (type 2 = file-path input) and append it to `feature`.
 // NOTE(review): returns feature.size() rather than get_feature's status
 // code, so a negative SDK error is masked as 0; and a non-empty input
 // vector is appended to, not replaced — confirm callers expect this.
 int FaceApiHelper::get_face_feature_by_filepath(const std::string& img_path,std::vector<float>& feature)
{
	std::lock_guard<std::mutex> lck(api_mt);
	get_feature(img_path,2,feature);
	return feature.size();
}


// Extract a face feature vector via the SDK and append it to `features`,
// replacing any NaN entries with 0 so downstream compare_feature stays sane.
// img:  image payload; `type` selects its encoding (1 = base64, 2 = file path,
//       per the callers in this file).
// Returns the SDK's count/status code (may be <= 0 on failure).
// NOTE: not locked — callers hold api_mt.
int FaceApiHelper::get_feature(const std::string& img,int type,std::vector<float>& features)
{
	const float* feature = nullptr; // buffer owned by the SDK; do not free
	int feature_num = api->get_face_feature(img.c_str(),type,feature);

	// Fix: the original loop compared signed feature_num against a uint32_t
	// index, so a negative SDK error code converted to a huge bound and the
	// loop read wild memory through a (possibly still null) pointer. Bail out
	// on error or missing buffer before touching it.
	if (feature_num <= 0 || feature == nullptr) {
		return feature_num;
	}

	features.reserve(features.size() + feature_num);
	for (int i = 0; i < feature_num; i++) {
		features.push_back(std::isnan(feature[i]) ? 0.0f : feature[i]);
	}

	return feature_num;
}

// Debug helper: print every element of `vec` to stdout, 8 per line, after a
// separator rule.
// Fix: the original condition `i & 8 == 0` parsed as `i & (8 == 0)` == 0
// (operator precedence), so the newline branch was dead code — and had it
// fired, the `else` would have skipped printing the element entirely. Now
// every element is printed and a newline is emitted after each group of 8.
template<typename T>
void dump_vector(const std::vector<T>& vec)
{
	std::cout << "--------------------------" << std::endl;
	for (size_t i = 0; i < vec.size(); i++) {
		std::cout << vec[i] << ' ';
		if ((i + 1) % 8 == 0) {
			std::cout << '\n';
		}
	}
	std::cout << std::endl;
}

// Extract the face feature vector from a base64-encoded image
// (type 1 = base64 input) and append it to `features`.
// NOTE(review): returns features.size(), masking get_feature's status code —
// same caveat as get_face_feature_by_filepath.
int FaceApiHelper::get_face_feature_by_base64(const std::string& image, std::vector<float>& features)
{
	std::lock_guard<std::mutex> lck(api_mt);
	get_feature(image,1,features);
	return features.size(); 
}


// Score the similarity of two base64-encoded face images: extract a feature
// vector from each and hand the pair to the SDK's comparison routine.
// Serialized by api_mt (get_feature itself does not lock).
float FaceApiHelper::match_by_base64(const std::string& img1,const std::string& img2)
{
	std::lock_guard<std::mutex> lck(api_mt);

	std::vector<float> features_a;
	std::vector<float> features_b;
	get_feature(img1, 1, features_a);
	get_feature(img2, 1, features_b);

	return api->compare_feature(features_a, features_b);
}


// Score similarity between two pre-extracted feature vectors.
// NOTE(review): unlike the other api-> calls in this file, this one does NOT
// take api_mt (the lock is deliberately commented out). Confirm the SDK's
// compare_feature is safe to call concurrently before relying on this.
float FaceApiHelper::compare_feature(const std::vector<float>& f1,const std::vector<float>& f2)
{
	//std::lock_guard<std::mutex> lck(api_mt);
    return api->compare_feature(f1,f2);
}

