package com.yisa.person.job;

import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.google.common.collect.Lists;
import com.yisa.person.entity.FaceGroup;
import com.yisa.person.entity.Label;
import com.yisa.person.entity.PersonInfo;
import com.yisa.person.entity.Photo;
import com.yisa.person.service.FaceGroupService;
import com.yisa.person.utils.Base64Util;
import com.yisa.person.utils.HutoolUtil;
import lombok.extern.slf4j.Slf4j;
import net.anumbrella.seaweedfs.core.FileTemplate;
import net.anumbrella.seaweedfs.core.file.FileHandleStatus;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@Component
@Slf4j
public class UpdatePersonJob {

    /** Number of person IDs sent to the verification API per request. */
    private static final int BATCH_SIZE = 20;

    @Resource
    private FaceGroupService faceGroupService;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Resource
    private HutoolUtil hutoolUtil;

    /** Kafka topic that enriched person records are published to. */
    @Value("${kafka.topic}")
    private String topic;


    /**
     * Scheduled job: loads FaceGroup rows that are still missing identity
     * details, calls the real-name verification API in batches of
     * {@link #BATCH_SIZE} person IDs, writes the returned fields back through
     * {@code faceGroupService.updateData} and publishes one Kafka message per
     * enriched record.
     *
     * <p>Fix: a blank or malformed API response for one batch now skips only
     * that batch ({@code continue}) instead of aborting the whole run
     * ({@code return}), which previously discarded every already-assembled
     * update without writing it back or publishing it.</p>
     *
     * <p>NOTE(review): {@code fixedDelay = 5} means 5 <em>milliseconds</em>
     * between runs of a job that calls a remote API — confirm this should not
     * be 5000.</p>
     */
    @Scheduled(initialDelay = 1000, fixedDelay = 5)
    public void sync() {
        log.warn("开始获取信息");

        // Rows whose identity fields have not been filled in yet.
        List<FaceGroup> documentList = faceGroupService.findNotUpdateData();
        if (documentList == null) {
            // Defensive: treat a null result as "nothing to fetch" instead of
            // throwing an NPE on size()/partition below.
            documentList = new ArrayList<>();
        }
        log.warn("获取到信息数量:{}", documentList.size());

        // Accumulated updates destined for mongo.
        List<JSONObject> personInfoList = new ArrayList<>(documentList.size());
        // Accumulated records destined for Kafka.
        List<PersonInfo> personInfoKafkaList = new ArrayList<>(documentList.size());


        // Split into batches of BATCH_SIZE for the remote call.
        List<List<FaceGroup>> partition = Lists.partition(documentList, BATCH_SIZE);
        for (List<FaceGroup> faceGroupList : partition) {
            List<Long> personIds = faceGroupList.stream()
                    .map(document -> Long.valueOf(document.getPersonId())).collect(Collectors.toList());
            Map<String, FaceGroup> personId2IdMap = faceGroupList.stream()
                    // On a duplicate personId keep the first (older) entry.
                    .collect(Collectors.toMap(FaceGroup::getPersonId, e -> e, (k1, k2) -> k1));

            // Call the real-name verification API for this batch.
            String result = hutoolUtil.post(personIds);

            if (StringUtils.isBlank(result)) {
                // Skip the bad batch; keep processing (and later flushing)
                // the remaining ones.
                continue;
            }

            // Parse the JSON response string.
            JSONObject jsonObject = JSONUtil.parseObj(result);
            if (CollectionUtils.isEmpty(jsonObject)) {
                continue;
            }

            JSONObject responseStatusListObject = jsonObject.getJSONObject("ResponseStatusListObject");
            if (CollectionUtils.isEmpty(responseStatusListObject)) {
                continue;
            }
            JSONArray responseStatusObject = responseStatusListObject.getJSONArray("ResponseStatusObject");
            if (CollectionUtils.isEmpty(responseStatusObject)) {
                continue;
            }

            List<JSONObject> partitionList = new ArrayList<>(BATCH_SIZE);
            List<PersonInfo> personInfoPreKafkaList = new ArrayList<>(BATCH_SIZE);

            // Assemble the mongo update documents for this batch.
            for (int i = 0; i < responseStatusObject.size(); i++) {
                JSONObject object = responseStatusObject.getJSONObject(i);
                JSONObject personInfo = object.getJSONObject("PersonInfo");
                if (CollectionUtils.isEmpty(personInfo)) {
                    continue;
                }

                FaceGroup faceGroup = personId2IdMap.get(personInfo.getStr("PersonID"));
                // The API may return a PersonID we did not ask for — skip it
                // instead of dereferencing null (previously an NPE).
                if (faceGroup == null || faceGroup.getId() == null) {
                    continue;
                }

                personInfo.set("id", faceGroup.getId());
                // Destined for the mongo update.
                partitionList.add(personInfo);
                // Destined for Kafka.
                personInfoPreKafkaList.add(generateData(personInfo, faceGroup));
            }

            log.warn("组装完毕partitionList:{}", partitionList.size());
            personInfoList.addAll(partitionList);
            personInfoKafkaList.addAll(personInfoPreKafkaList);
        }

        // Bulk write-back of the enriched fields.
        log.warn("批量回写信息personInfoList:{}", personInfoList.size());
        faceGroupService.updateData(personInfoList);

        sendMessage(personInfoKafkaList);
    }


    /**
     * Serializes each record to JSON and sends it to the configured Kafka
     * topic; failures are logged in the async callback.
     *
     * @param personInfoKafkaList records to publish; may be empty
     */
    public void sendMessage(List<PersonInfo> personInfoKafkaList) {

        for (PersonInfo personInfo : personInfoKafkaList) {
            String obj2String = com.alibaba.fastjson.JSONObject.toJSONString(personInfo);
            ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, obj2String);
            future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
                @Override
                public void onFailure(Throwable throwable) {
                    // Pass the Throwable as the dedicated trailing argument so
                    // SLF4J logs the stack trace; the old form used it to fill
                    // a "{}" placeholder, which SLF4J never does for a sole
                    // trailing Throwable.
                    log.error("{} - 生产者 发送消息失败", topic, throwable);
                }

                @Override
                public void onSuccess(SendResult<String, String> stringObjectSendResult) {
                    // Success is intentionally not logged to avoid log noise.
                }
            });
        }
    }



    /**
     * Maps one API response entry plus its originating FaceGroup row to the
     * PersonInfo payload that is published to Kafka. Most demographic fields
     * are filled with fixed placeholder codes (e.g. faith "00",
     * marital/education "90"); identity fields come from the FaceGroup row and
     * the photo fields from the API response.
     *
     * @param jsonObject one "PersonInfo" entry from the API response
     *                   (provides "savePath" and "PersonPhotoData")
     * @param faceGroup  the matching source row (provides id number, name,
     *                   gender, birthday, household data, timestamp)
     * @return the assembled Kafka payload
     */
    private PersonInfo generateData(JSONObject jsonObject, FaceGroup faceGroup) {
        PersonInfo personInfo = new PersonInfo();
        personInfo.setId_type(111);
        personInfo.setId_number(faceGroup.getPersonnelIdNumber());
        personInfo.setName(faceGroup.getPersonnelName());
        personInfo.setGender(faceGroup.getGender());
        personInfo.setNation("");
        personInfo.setBirthday(faceGroup.getBirthday());
        personInfo.setFaith("00");
        personInfo.setMarital("90");
        personInfo.setEducation("90");
        personInfo.setHousehold_code(faceGroup.getHouseholdCode());
        personInfo.setHousehold_address(faceGroup.getHouseholdAddress());
        personInfo.setResidential_code(0);
        personInfo.setResidential_address("");
        personInfo.setNative_place_code(0);
        personInfo.setNative_place_address("");
        personInfo.setResidence_number("");
        personInfo.setHousehold_relation("");
        personInfo.setWork_unit("");
        personInfo.setPopulation_status(0);

        List<Photo> photos = new ArrayList<>();
        Photo p1 = new Photo(jsonObject.getStr("savePath"), 1, jsonObject.getStr("PersonPhotoData"),
                0, faceGroup.getTtamp(), 255, 255, 0, 0);
        photos.add(p1);
        personInfo.setPhotos(photos);

        personInfo.setSource(6);
        personInfo.setCountry("中国");
        personInfo.setForeign_name("");

        return personInfo;
    }
}
