package com.microservice.base.task;

import com.alibaba.fastjson.JSONObject;
import com.microservice.base.common.utils.HttpClientUtil;
import com.microservice.base.entity.*;
import com.microservice.base.mapper.hikcommon.*;
import com.microservice.base.service.TbOrgSV;
import com.microservice.base.service.TbPersonSV;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.interceptor.TransactionAspectSupport;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Scheduled synchronisation job.
 *
 * <p>Every five minutes it (1) pulls the "common" tables from the cloud into the local database,
 * (2) transforms them into the Hikvision shadow tables (org / person / face), (3) pushes the
 * result to the Hikvision interfaces, and finally pulls camera / region master data back.
 *
 * @author jbx
 * @package_name com.microservice.base.task
 * @create_date 2020/12/1 9:24
 */
@Component
@Transactional
@EnableScheduling
public class syncData {
  /** Rows are written to the local database in chunks of this size. */
  private static final int BATCH_SIZE = 1000;

  /** Cloud endpoint that serves the rows of the table being synchronised. */
  private static final String CLOUD_DB_URL =
      "http://ms.do-ok.com:18010/inner/synchronizationDb/v1/getDb";

  private static final Logger logger = LoggerFactory.getLogger(syncData.class);

  @Autowired CommonMP commonMP;
  @Autowired InfoStudentMP infoStudentMP;
  @Autowired InfoTeacherMP infoTeacherMP;
  // Fully qualified because the field deliberately carries the same name as its type.
  @Autowired com.microservice.base.mapper.hikcommon.SysTreeviewMP SysTreeviewMP;
  @Autowired UserMapper userMapper;
  @Autowired TreeviewUserMP treeviewUserMP;
  @Autowired TbOrgSV tbOrgSV;
  @Autowired TbPersonSV personSV;
  // NOTE(review): faceMP is not referenced anywhere in this class; kept for compatibility.
  @Autowired FaceMP faceMP;
  @Autowired HikInterfacePush hikInterfacePush;

  @Value("${schoolcode}")
  private String schoolCode;

  /**
   * Entry point of the scheduled job (runs every 5 minutes).
   *
   * @return a human-readable result message
   */
  @Scheduled(cron = "0 0/5 * * * ? ")
  public String sync() throws InterruptedException {
    long startTime = System.currentTimeMillis();
    logger.info("开始同步common数据...");
    try {
      // Pull the cloud tables into the local database.
      syncTable("c_info_teacher", InfoTeacher.class);
      syncTable("c_info_student", InfoStudent.class);
      syncTable("sys_user", User.class);
      syncTable("treeview_user", TreeviewUser.class);
      syncTable("sys_treeview", SysTreeview.class);
      logger.info("common数据同步完成,耗时:{}ms", System.currentTimeMillis() - startTime);
    } catch (Exception e) {
      logger.error("common数据同步失败", e);
      return e + "执行定时任务失败";
    }

    // Transform the freshly synchronised tables into the Hikvision shadow tables.
    logger.info("开始清洗数据....");
    long startTimeQing = System.currentTimeMillis();

    // sys_treeview -> tb_org. Full sync ("0") when the local org table is still empty,
    // incremental ("1") otherwise; sync_flag is consumed later by the Hik push calls.
    boolean orgFull = tbOrgSV.queryAll().isEmpty();
    try {
      tbOrgSV.TreetoOrg(orgFull ? "0" : "1");
      logger.info(orgFull ? "全量同步org库成功" : "增量同步org库成功");
    } catch (Exception e) {
      TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
      logger.error(orgFull ? "全量同步org库出现异常" : "增量同步org库出现异常", e);
    }

    // sys_user -> tb_person (and the face table) with the same full/incremental rule.
    boolean personFull = personSV.queryAll().isEmpty();
    try {
      personSV.userToPerson(personFull ? "0" : "1");
      logger.info(personFull ? "全量同步person库和face库成功" : "增量同步person库和face库成功");
    } catch (Exception e) {
      TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
      logger.error(personFull ? "全量同步person库和face库失败" : "增量同步person库和face库失败", e);
    }
    logger.info("数据清洗完成,耗时:{}ms", System.currentTimeMillis() - startTimeQing);

    // Push order matters: an org must exist before the people/faces referencing it,
    // and updates are performed after the batch adds.
    try {
      hikInterfacePush.orgBatchAdd();
      hikInterfacePush.personBatchAdd();
      hikInterfacePush.faceAdd();
      hikInterfacePush.PersonUpdate();
      hikInterfacePush.orgUpdate();
      hikInterfacePush.faceUpdate();
    } catch (Exception e) {
      TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
      logger.error("同步海康数据接口出现异常", e);
    }

    // Pull camera and region master data back into the local database.
    try {
      hikInterfacePush.cameraAdd();
      hikInterfacePush.regionAdd();
    } catch (Exception e) {
      TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
      logger.error("拉取海康监控点,区域数据出现异常", e);
    }
    logger.info("定时任务执行成功");
    return "执行定时任务成功";
  }

  /**
   * Pulls one table from the cloud endpoint and stores the returned rows locally.
   *
   * <p>When the local table already contains rows, the request carries the local
   * {@code max(update_time)} so the cloud only returns newer rows.
   *
   * @param tableName table to synchronise; must be one of the whitelisted names below
   * @param clazz entity class the JSON rows are deserialised into
   */
  private void syncTable(String tableName, Class<?> clazz) {
    String sql;
    switch (tableName) {
      case "c_info_teacher":
      case "c_info_student":
      case "sys_user":
      case "sys_treeview":
      case "treeview_user":
        // The case labels above form a fixed whitelist, so concatenation is injection-safe.
        sql = "select max(update_time) as maxUpdateTime from " + tableName;
        break;
      default:
        // Bug fix: an unknown table used to fall through and execute an empty SQL string.
        logger.error("未知的同步表:{}", tableName);
        return;
    }

    HashMap<String, Object> params = new HashMap<>();
    params.put("tableName", tableName);
    params.put("schoolCode", schoolCode);

    Map<String, Object> maxRow = commonMP.executeSqlForMap(sql);
    Object maxUpdateTime = maxRow == null ? null : maxRow.get("maxUpdateTime");
    if (maxUpdateTime != null) {
      // Bug fix: max(update_time) is null on an empty table; formatting null used to throw.
      // SimpleDateFormat is not thread-safe, hence a fresh instance per call.
      params.put("updateTime", new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(maxUpdateTime));
    }

    List list;
    try {
      String s = HttpClientUtil.get(CLOUD_DB_URL, params);
      JSONObject jsonObject = JSONObject.parseObject(s);
      if (jsonObject == null) {
        // Bug fix: the old code logged this condition and then dereferenced the null anyway.
        logger.error("连接ms失败未能成功从云端获取数据");
        return;
      }
      list = jsonObject.getJSONArray("data").toJavaList(clazz);
    } catch (Exception e) {
      TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
      logger.error(tableName + "表拉取失败,您的网络出现问题", e);
      return;
    }
    importData(list, tableName);
  }

  /**
   * Writes the pulled rows into the matching local table in batches of {@link #BATCH_SIZE}.
   *
   * @param list rows to store; element type must match {@code tableName}
   * @param tableName decides which mapper receives the rows
   */
  public void importData(List list, String tableName) {
    if (list != null && !list.isEmpty()) {
      try {
        for (int from = 0; from < list.size(); from += BATCH_SIZE) {
          saveBatch(list.subList(from, Math.min(from + BATCH_SIZE, list.size())), tableName);
        }
      } catch (Exception e) {
        TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
        logger.error(tableName + "表同步失败,请稍后再试", e);
        // Bug fix: the old code fell through and logged "同步成功" even after a failed batch.
        return;
      }
    }
    logger.info("{}表同步成功", tableName);
  }

  /** Routes one batch of rows to the mapper that owns {@code tableName}. */
  @SuppressWarnings("unchecked")
  private void saveBatch(List batch, String tableName) {
    switch (tableName) {
      case "c_info_teacher":
        infoTeacherMP.saveBatch((List<InfoTeacher>) batch);
        break;
      case "c_info_student":
        infoStudentMP.saveBatch((List<InfoStudent>) batch);
        break;
      case "sys_user":
        userMapper.saveBatch((List<User>) batch);
        break;
      case "sys_treeview":
        SysTreeviewMP.saveBatch((List<SysTreeview>) batch);
        break;
      case "treeview_user":
        treeviewUserMP.saveBatch((List<TreeviewUser>) batch);
        break;
      default:
        break;
    }
  }
}
