package com.netsdk.demo.customize;

import static com.netsdk.demo.util.StructFieldChooser.GetSelectedSingleFieldValue;

import java.io.UnsupportedEncodingException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;

import com.netsdk.demo.util.CaseMenu;
import com.netsdk.lib.NetSDKLib;
import com.netsdk.lib.NetSDKLib.LLong;
import com.netsdk.lib.NetSDKLib.NET_ANALYSE_RULE;
import com.netsdk.lib.NetSDKLib.NET_ANALYSE_RULE_INFO;
import com.netsdk.lib.NetSDKLib.NET_CFG_LINKGROUP_INFO;
import com.netsdk.lib.NetSDKLib.NET_FACEANALYSIS_RULE_INFO;
import com.netsdk.lib.NetSDKLib.NET_IN_PUSH_ANALYSE_PICTURE_FILE;
import com.netsdk.lib.NetSDKLib.NET_OUT_ADD_ANALYSE_TASK;
import com.netsdk.lib.NetSDKLib.NET_OUT_PUSH_ANALYSE_PICTURE_FILE;
import com.netsdk.lib.NetSDKLib.NET_PUSH_PICFILE_INFO;
import com.netsdk.lib.ToolKits;
import com.netsdk.lib.utils.Initialization;
import com.sun.jna.Memory;
import com.sun.jna.Pointer;

/**
 * Demo for NetSDK intelligent face-analysis tasks: create an analyse task,
 * subscribe to its results, receive results in a callback (parsed on a worker
 * pool using a reusable struct pool), and push picture files for analysis.
 *
 * NOTE(review): the class name keeps the original typo ("Aanlysis") because it
 * is the public entry point and may be referenced elsewhere.
 */
public class FaceAanlysisTaskDemo extends Initialization {
	// ID of the analyse task created by addAnalyseTask(); 0 until one exists.
	int taskId = 0;

	/**
	 * 1 - Create an analyse task (face analysis over pushed picture files).
	 *
	 * @throws UnsupportedEncodingException if the face-group id cannot be
	 *                                      encoded with the configured charset
	 */
	public void addAnalyseTask() throws UnsupportedEncodingException {
		// Input parameter: push-picture-file task, started immediately.
		NET_PUSH_PICFILE_INFO pInParam = new NET_PUSH_PICFILE_INFO();
		pInParam.emStartRule = NetSDKLib.EM_ANALYSE_TASK_START_RULE.EM_ANALYSE_TASK_START_NOW;

		// Rule array (native layout requires the full 32 slots); only slot 0 is
		// filled in: a face-analysis rule matching human faces.
		NET_ANALYSE_RULE_INFO[] ruleInfos = (NET_ANALYSE_RULE_INFO[]) new NET_ANALYSE_RULE_INFO()
				.toArray(32);
		ruleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_FACEANALYSIS;
		ruleInfos[0].dwRuleType = NetSDKLib.EVENT_IVS_FACEANALYSIS;
		ruleInfos[0].nObjectTypeNum = 1;
		ruleInfos[0].emObjectTypes[0] = NetSDKLib.EM_ANALYSE_OBJECT_TYPE.EM_ANALYSE_OBJECT_TYPE_HUMANFACE;

		// Face-analysis specific rule data: link face group "1", similarity 75.
		NET_FACEANALYSIS_RULE_INFO faceRule = new NET_FACEANALYSIS_RULE_INFO();
		NET_CFG_LINKGROUP_INFO[] linkGroups = (NET_CFG_LINKGROUP_INFO[]) new NET_CFG_LINKGROUP_INFO().toArray(20);
		linkGroups[0].bEnable = true;
		String szGroupID1 = "1";
		byte[] groupIdBytes = szGroupID1.getBytes(encode);
		System.arraycopy(groupIdBytes, 0, linkGroups[0].szGroupID, 0, groupIdBytes.length);
		linkGroups[0].bySimilarity = (byte) 75; // recognition similarity threshold

		faceRule.nLinkGroupNum = 1;
		faceRule.stuLinkGroup = linkGroups;
		// Serialize the face rule into native memory and attach it via pReserved.
		Pointer pFaceRule = new Memory(faceRule.size());
		// FIX: clear the whole allocation (the original cleared only dwSize bytes).
		pFaceRule.clear(faceRule.size());
		ToolKits.SetStructDataToPointer(faceRule, pFaceRule, 0);
		ruleInfos[0].pReserved = pFaceRule;

		NET_ANALYSE_RULE analyseRule = new NET_ANALYSE_RULE();
		analyseRule.stuRuleInfos = ruleInfos;
		analyseRule.nRuleCount = 1; // one rule as an example

		pInParam.stuRuleInfo = analyseRule;
		pInParam.write(); // flush the Java struct to native memory before the call

		// Output parameter.
		NET_OUT_ADD_ANALYSE_TASK pOutParam = new NET_OUT_ADD_ANALYSE_TASK();

		boolean ok = netSdk.CLIENT_AddAnalyseTask(loginHandle,
				NetSDKLib.EM_DATA_SOURCE_TYPE.EM_DATA_SOURCE_PUSH_PICFILE, pInParam.getPointer(), pOutParam, 30000);
		if (!ok) {
			System.out.println("创建任务错误码：" + netSdk.CLIENT_GetLastError());
		} else {
			taskId = pOutParam.nTaskID;
			System.out.println("解析任务创建成功：taskId=" + taskId);
		}
	}

	// Subscription handle from CLIENT_AttachAnalyseTaskResult; null when no
	// subscription is active.
	LLong AttachHandle = null;

	/**
	 * 2 - Subscribe to the analyse results of the task created above.
	 */
	public void attachAnalyseTaskResult() {
		NetSDKLib.NET_IN_ATTACH_ANALYSE_RESULT inParam = new NetSDKLib.NET_IN_ATTACH_ANALYSE_RESULT();
		inParam.nTaskIDs[0] = taskId;
		inParam.nTaskIdNum = 1;
		// Only deliver face-recognition events.
		NetSDKLib.NET_ANALYSE_RESULT_FILTER resultFilter = new NetSDKLib.NET_ANALYSE_RESULT_FILTER();
		resultFilter.nEventNum = 1;
		resultFilter.dwAlarmTypes[0] = NetSDKLib.EVENT_IVS_FACERECOGNITION;
		inParam.cbAnalyseTaskResult = FaceAnalyseTaskResultCallBack.getInstance();
		inParam.stuFilter = resultFilter;
		LLong handle = netSdk.CLIENT_AttachAnalyseTaskResult(loginHandle, inParam, 30000);
		if (handle.longValue() == 0) {
			System.err.println("订阅lLong1：" + handle);
			System.err.println("订阅错误码：" + netSdk.CLIENT_GetLastError());
		} else {
			// FIX: assign the field. The original declared a shadowing local
			// ("LLong AttachHandle = ..."), so DetachAnalyseTaskResult() always
			// saw a null handle.
			AttachHandle = handle;
			// FIX: was "订阅成功taskId={}:" — an SLF4J placeholder printed literally.
			System.out.println("订阅成功taskId=" + taskId);
		}
	}

	/**
	 * Cancel the analyse-result subscription.
	 */
	public void DetachAnalyseTaskResult() {
		// Guard: nothing to detach if attach never succeeded (field would be null
		// and the SDK call would be handed a bogus handle).
		if (AttachHandle == null || AttachHandle.longValue() == 0) {
			System.err.println("DetachAnalyseTaskResult Failed! no active subscription");
			return;
		}
		// NOTE(review): subscription was made via CLIENT_AttachAnalyseTaskResult
		// but is released via CLIENT_DetachAnalyseTaskState — confirm against the
		// SDK documentation that this is the intended pairing.
		if (netSdk.CLIENT_DetachAnalyseTaskState(AttachHandle)) {
			AttachHandle = null;
			System.out.println("DetachAnalyseTaskResult Succeed!");
		} else {
			System.err.printf("DetachAnalyseTaskResult Failed!Last Error[0x%x]\n", netSdk.CLIENT_GetLastError());
		}
	}

	// Worker pool that parses callback payloads off the SDK callback thread.
	// NOTE(review): 64 threads but only MAX_TASK_COUNT (20) pooled structs, so at
	// most 20 parses run concurrently; the rest block in faceReconPool.take().
	// (The original comment claimed a capacity of 40.)
	static ExecutorService executorService = Executors.newFixedThreadPool(64);

	/**
	 * 3 - Recognition-result callback (singleton). Copies the raw native struct
	 * bytes and hands parsing to the worker pool so the SDK callback thread
	 * returns quickly.
	 */
	public static class FaceAnalyseTaskResultCallBack implements NetSDKLib.fAnalyseTaskResultCallBack {

		private FaceAnalyseTaskResultCallBack() {
		}

		/** Initialization-on-demand holder for the singleton instance. */
		private static class CallBackHolder {
			private static final FaceAnalyseTaskResultCallBack instance = new FaceAnalyseTaskResultCallBack();
		}

		public static FaceAnalyseTaskResultCallBack getInstance() {
			return CallBackHolder.instance;
		}

		// Template instance used only to obtain the native struct size below.
		public static NetSDKLib.NET_CB_ANALYSE_TASK_RESULT_INFO resultInfo = new NetSDKLib.NET_CB_ANALYSE_TASK_RESULT_INFO();

		@Override
		public int invoke(LLong lAttachHandle, Pointer pstAnalyseTaskResult, Pointer pBuf, int dwBufSize,
				Pointer dwUser) {
			// Snapshot the native struct into a Java array before returning —
			// presumably the pointer is only valid for the duration of the
			// callback (hence the copy); TODO confirm against SDK docs.
			final byte[] data = pstAnalyseTaskResult.getByteArray(0, resultInfo.size());
			executorService.submit(new Runnable() {
				@Override
				public void run() {
					new FaceAanlysisTaskDemo().jiexi(data);
				}
			});
			return 0;
		}
	}

	//////////////////////////////// static pool ////////////////////////////////
	// A blocking deque used as a fixed-size pool of reusable event structs.
	// Larger capacity = more events processed concurrently, more memory used.
	private final static int MAX_TASK_COUNT = 20; // pool capacity
	private final static LinkedBlockingDeque<NetSDKLib.DEV_EVENT_FACERECOGNITION_INFO> faceReconPool = new LinkedBlockingDeque<>(MAX_TASK_COUNT);

	static {
		// Pre-fill the pool with reusable structs.
		for (int i = 0; i < MAX_TASK_COUNT; i++) {
			faceReconPool.offer(new NetSDKLib.DEV_EVENT_FACERECOGNITION_INFO());
		}
	}

	/**
	 * Parse one callback payload (raw bytes of NET_CB_ANALYSE_TASK_RESULT_INFO)
	 * and print selected face-recognition fields. Runs on the worker pool.
	 *
	 * @param a raw struct bytes copied in the SDK callback
	 */
	public void jiexi(byte[] a) {
		NetSDKLib.NET_CB_ANALYSE_TASK_RESULT_INFO resultInfo = new NetSDKLib.NET_CB_ANALYSE_TASK_RESULT_INFO();
		ToolKits.ByteArrToStructure(a, resultInfo);
		NetSDKLib.NET_ANALYSE_TASK_RESULT[] taskResults = resultInfo.stuTaskResultInfos;
		NetSDKLib.NET_SECONDARY_ANALYSE_EVENT_INFO[] eventInfos = taskResults[0].stuEventInfos;
		Pointer pstEventInfo = eventInfos[0].pstEventInfo;
		NetSDKLib.DEV_EVENT_FACERECOGNITION_INFO msg = null;
		try {
			// Borrow a reusable event struct from the static pool (blocks until
			// one is available).
			msg = faceReconPool.take();
			// Capture time of the snapshot.
			NetSDKLib.NET_TIME_EX UTC = (NetSDKLib.NET_TIME_EX) GetSelectedSingleFieldValue("UTC", msg, pstEventInfo);
			System.out.println(" UTC:" + UTC.toString());
			// Number of matched candidates. (FIX: dropped the unused
			// stuCandidatesEx/stPersonInfo locals the original computed here.)
			int nCandidateNum = (int) GetSelectedSingleFieldValue("nCandidateNum", msg, pstEventInfo);
			System.out.println(" nCandidateNum:" + nCandidateNum);
			System.out.println("处理----" + System.currentTimeMillis() + " szFileID:" + new String(taskResults[0].szFileID));
		} catch (InterruptedException e) {
			// FIX: restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			System.err.println("静态池错误: " + e.getLocalizedMessage());
		} catch (Exception e) {
			System.err.println("数据获取异常：" + e.getLocalizedMessage());
		} finally {
			if (msg != null) {
				faceReconPool.offer(msg); // return the struct to the pool
			}
		}
	}

	/**
	 * 4 - Push a face picture with the demo defaults (D:\296.jpg, fileId "666").
	 * FIX: the original duplicated the entire body of pushAnalysePictureFile1;
	 * it now delegates.
	 */
	public void pushAnalysePictureFile() {
		pushAnalysePictureFile1("D:\\296.jpg", "666");
	}

	/**
	 * 4 - Push a single face picture file into the analyse task.
	 *
	 * @param str1    path of the picture file to push
	 * @param fileId6 caller-chosen file id echoed back in the analyse result
	 */
	public void pushAnalysePictureFile1(String str1, String fileId6) {
		System.out.println("startTime------------:" + System.currentTimeMillis());
		NET_IN_PUSH_ANALYSE_PICTURE_FILE inParam = new NET_IN_PUSH_ANALYSE_PICTURE_FILE();
		System.out.println(" taskId:" + taskId);
		inParam.nTaskID = taskId;
		inParam.nPicNum = 1;
		int totalLen = 0; // total picture buffer length
		byte[] picBytes = ToolKits.readPictureToByteArray(str1);
		// FIX: the original dereferenced the buffer (.length) BEFORE its null
		// check, so an unreadable file caused an NPE. Bail out early instead.
		if (picBytes == null) {
			System.out.println("推送图片任务失败！");
			return;
		}
		System.out.println("人脸解析任务：fileId=" + fileId6);

		byte[] fileIdBytes = fileId6.getBytes();
		System.arraycopy(fileIdBytes, 0, inParam.stuPushPicInfos[0].szFileID, 0, fileIdBytes.length);
		System.out.println("testFileBuffer1:" + picBytes.length);
		inParam.stuPushPicInfos[0].nLength = picBytes.length;
		// Offset into the shared buffer; matters when pushing multiple pictures.
		inParam.stuPushPicInfos[0].nOffset = totalLen;
		// Total length is the sum of all picture lengths.
		totalLen = totalLen + inParam.stuPushPicInfos[0].nLength;
		inParam.nBinBufLen = totalLen;
		// Allocate and zero the native buffer, then write the picture at its offset.
		inParam.pBinBuf = new Memory(totalLen);
		inParam.pBinBuf.clear(totalLen);
		inParam.pBinBuf.write(inParam.stuPushPicInfos[0].nOffset, picBytes, 0,
				inParam.stuPushPicInfos[0].nLength);

		NET_OUT_PUSH_ANALYSE_PICTURE_FILE outParam = new NET_OUT_PUSH_ANALYSE_PICTURE_FILE();

		boolean ok = netSdk.CLIENT_PushAnalysePictureFile(loginHandle, inParam, outParam, 30000);
		if (!ok) {
			System.out.println("推送图片任务失败！");
			System.out.println("推图错误码：" + netSdk.CLIENT_GetLastError());
		} else {
			System.out.println("推图成功！！!");
		}
	}

	/**
	 * Push the same picture 64 times with distinct file ids ("1".."64") to
	 * exercise the worker pool.
	 */
	public void pushs() {
		for (int i = 0; i < 64; i++) {
			String str = "D:\\296.jpg";
			String fileId = (i + 1) + "";
			pushAnalysePictureFile1(str, fileId);
		}
	}

	/** Interactive menu wiring the demo steps together. */
	public void RunTest() {
		System.out.println("Run Test");
		CaseMenu menu = new CaseMenu();
		menu.addItem((new CaseMenu.Item(this, "添加智能分析任务测试", "addAnalyseTask")));
		menu.addItem((new CaseMenu.Item(this, "智能分析结果", "attachAnalyseTaskResult")));
		menu.addItem((new CaseMenu.Item(this, "取消智能分析订阅", "DetachAnalyseTaskResult")));
		menu.addItem((new CaseMenu.Item(this, "批量推图", "pushs")));
		menu.run();
	}

	public static void main(String[] args) {
		FaceAanlysisTaskDemo analyseTaskDemo = new FaceAanlysisTaskDemo();
		InitTest("172.12.5.112", 37777, "admin", "admin123");
		analyseTaskDemo.RunTest();
		LoginOut();
	}

}
