package com.baidu.ai.aip.solution;

import com.baidu.ai.aip.utils.Base64Util;
import com.baidu.ai.aip.utils.FileUtil;
import com.baidu.ai.aip.utils.GsonUtils;
import com.baidu.ai.aip.utils.HttpUtil;
import com.etc.test.AuthService;
//import net.sf.json.JSONArray;
//import net.sf.json.JSONObject;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.stereotype.Component;

/**
 * 百度AIP图像组合API
 */
@Component
public class ImgCensor {

	/** Verdict returned when the image passes all checks. */
	private static final String VERDICT_OK = "正常";
	/** Verdict returned when the censor call itself fails (network, auth, parse). */
	private static final String VERDICT_ERROR = "图片审核失败";

	/**
	 * Submits a local image file to the Baidu AIP image-censor endpoint and
	 * classifies it against three scenes: politician, antiporn, terror.
	 *
	 * @param filePath absolute or relative path of the image file to check
	 * @return a Chinese verdict string: one of the three violation messages,
	 *         {@code "正常"} when the image is clean, or {@code "图片审核失败"}
	 *         when the request/parse fails (previously a failure was silently
	 *         reported as clean — a fail-open bug).
	 */
	public static String checkImg(String filePath) {
		// Endpoint of the combined image-censor API.
		String imgCensorUrl = "https://aip.baidubce.com/api/v1/solution/direct/img_censor";
		try {
			String params = buildRequestParams(filePath);

			// Fetch the online accessToken; it is a mandatory parameter of the service.
			String accessToken = AuthService.getAuth();
			String result = HttpUtil.post(imgCensorUrl, accessToken, "application/json", params);
			JSONObject obj = new JSONObject(result);
			JSONObject scenesResult = obj.getJSONObject("result");

			// Round probabilities to two decimal places before thresholding.
			DecimalFormat df = new DecimalFormat("#0.00");

			// --- Pornography check ---
			// NOTE(review): index 1 of the antiporn result array is assumed to be
			// the "porn" class entry — confirm against the Baidu API response schema.
			JSONArray antiporn = scenesResult.getJSONObject("antiporn").getJSONArray("result");
			double probability = antiporn.getJSONObject(1).getDouble("probability");
			if (Double.parseDouble(df.format(probability)) > 0.75) {
				return "该图片包含色情信息";
			}

			// --- Politically sensitive check ---
			// Constant-first equals avoids an NPE if the field is absent/null.
			String people = scenesResult.getJSONObject("politician").getString("include_politician");
			if ("是".equals(people)) {
				return "该图片包含敏感政治信息";
			}

			// --- Terror/violence check ---
			// NOTE(review): the original logic flags terror when the FIRST coarse
			// score is LOW (< 0.65); this reads inverted but is preserved — verify
			// the meaning of result_coarse[0] against the API docs before changing.
			JSONArray resultCoarse = scenesResult.getJSONObject("terror").getJSONArray("result_coarse");
			double score = resultCoarse.getJSONObject(0).getDouble("score");
			if (Double.parseDouble(df.format(score)) < 0.65) {
				return "该图片包含暴力或恐怖信息";
			}
		} catch (Exception e) {
			// Fail CLOSED: report a censor failure instead of pretending the
			// image is clean (the original returned "正常" on any exception).
			e.printStackTrace();
			return VERDICT_ERROR;
		}
		return VERDICT_OK;
	}

	/**
	 * Builds the JSON request body: Base64-encoded image, the three scene
	 * names to run, and the OCR scene configuration.
	 *
	 * @param filePath path of the image to read and encode
	 * @return the request body serialized as JSON
	 * @throws Exception if the file cannot be read
	 */
	private static String buildRequestParams(String filePath) throws Exception {
		// OCR configuration used by the sceneConf section.
		Map<String, Object> ocrConf = new HashMap<String, Object>();
		ocrConf.put("recognize_granularity", "big");
		ocrConf.put("language_type", "CHN_ENG");
		ocrConf.put("detect_direction", true);
		ocrConf.put("detect_language", true);
		Map<String, Object> sceneConf = new HashMap<String, Object>();
		sceneConf.put("ocr", ocrConf);

		// Scenes (censor categories) to evaluate.
		List<Object> scenes = new ArrayList<Object>();
		scenes.add("politician"); // politically sensitive figures
		scenes.add("antiporn");   // pornography
		scenes.add("terror");     // terror / violence

		// Read the image and Base64-encode it as the API requires.
		byte[] imgData = FileUtil.readFileByBytes(filePath);
		String imgStr = Base64Util.encode(imgData);

		Map<String, Object> input = new HashMap<String, Object>();
		input.put("image", imgStr);
		input.put("scenes", scenes);
		input.put("sceneConf", sceneConf);
		return GsonUtils.toJson(input);
	}
}
