package com.example.task;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;

import HslCommunication.BasicFramework.SoftBasic;
import HslCommunication.Core.Transfer.IByteTransform;
import HslCommunication.Profinet.Siemens.SiemensS7Net;
import com.alibaba.excel.util.StringUtils;
import com.example.config.ClientGen;
import com.example.constant.OpcConstant;
import com.example.entity.DbBlock;
import com.example.entity.DbBlockDto;
import com.example.enums.TypeEnum;
import com.example.event.OpcMessageEvent;
import com.example.exception.OpcException;
import com.example.s7.PLCUtils;
import com.example.s7.PlcSectionTwoConnectorFactory;
import com.example.service.DbBlockService;
import com.example.strategy.PLCDataConvertStrategyFactory;
import com.example.utils.HiveUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;

import org.eclipse.milo.opcua.sdk.client.OpcUaClient;
import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue;
import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId;
import org.eclipse.milo.opcua.stack.core.types.enumerated.TimestampsToReturn;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * Copyright: Copyright (c) 2022
 *
 * <p>
 * Description: Scheduled tasks that batch-read PLC data over OPC UA,
 * publish the readings as application events (forwarded to Kafka), and
 * periodically persist them to Hive.
 *
 * @author LiuFeng
 * @version 2022/11/16 11:20
 * @since 2022/11/16
 */
@RestController
@Component
@EnableScheduling
@Slf4j
public class OpcTask {

    @Resource
    private DbBlockService dbBlockService;

    @Resource
    private ApplicationEventPublisher eventPublisher;

    /**
     * NodeIds of the section-one (一标段) DB blocks, loaded once in {@link #init()}.
     */
    List<NodeId> sectionOneBlocks = null;

    /**
     * NodeIds of the section-two (二标段) DB blocks. After {@link #init()} this
     * list also contains the section-one nodes, so it is the complete read list
     * used by both scheduled jobs.
     */
    List<NodeId> sectionTwoBlocks = null;

    /** JSON serializer; an ObjectMapper is thread-safe after configuration. */
    private ObjectMapper mapper;

    /**
     * Timestamp pattern for the Hive insert. DateTimeFormatter is immutable and
     * thread-safe, unlike the previously used shared SimpleDateFormat, which is
     * unsafe when the two {@code @Scheduled} jobs run on different threads.
     */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");


    /**
     * Loads the DB-block node lists once at startup and merges them into
     * {@link #sectionTwoBlocks}, the combined read list.
     */
    @PostConstruct
    public void init() {
        mapper = new ObjectMapper();
        if (sectionTwoBlocks == null) {
            // Section-one DB blocks, identifiers formatted as "t|xxx"
            sectionOneBlocks = dbBlockService.readSectionOneInfo();
            // Section-two DB blocks, identifiers formatted as "t|xxx"
            sectionTwoBlocks = dbBlockService.readSectionTwoInfo1();
            // Merge so sectionTwoBlocks holds every node to be polled
            sectionTwoBlocks.addAll(sectionOneBlocks);
        }
    }

    /**
     * Polls the PLC every second (sections one and two) and publishes the
     * readings as an {@link OpcMessageEvent} (forwarded to Kafka downstream).
     */
    @Scheduled(fixedRate = 1, timeUnit = TimeUnit.SECONDS)
    public void batchReadPLCData() {
        long startTime = System.nanoTime();
        try {
            // Batch-read PLC data over OPC UA
            List<DbBlockDto> dbBlockDtoList = gatherSectionOneData(sectionTwoBlocks);
            // Publish for the Kafka-forwarding listener
            eventPublisher.publishEvent(new OpcMessageEvent(this, dbBlockDtoList));
            log.info("读取数据耗时：{} 毫秒, 数据量：{}", Duration.ofNanos(System.nanoTime() - startTime).toMillis(), dbBlockDtoList.size());
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the scheduler's thread pool can observe it
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    /**
     * Every 10 minutes, reads all PLC variables and persists a snapshot to Hive.
     */
    @Scheduled(fixedRate = 10, timeUnit = TimeUnit.MINUTES)
    public void savePLCData2Hive() {
        long startTime = System.nanoTime();
        try {
            log.info("待读取的opc变量个数为：{}", sectionTwoBlocks.size());
            // Batch-read PLC data (sections one and two)
            List<DbBlockDto> dbBlockDtoList = gatherSectionOneData(sectionTwoBlocks);
            // Persist the snapshot to Hive
            save2Hive(dbBlockDtoList);
            log.info("完成hive存储，耗时：{} 毫秒, 数据量：{}", Duration.ofNanos(System.nanoTime() - startTime).toMillis(), dbBlockDtoList.size());
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the scheduler's thread pool can observe it
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Inserts one row (uuid, JSON payload, timestamp) into the Hive table
     * {@code gather_data}.
     *
     * @param dbBlockDtoList the readings to serialize and store
     * @throws SQLException if the insert fails
     */
    public void save2Hive(List<DbBlockDto> dbBlockDtoList) throws SQLException {
        // NOTE(review): the connection appears to be owned/cached by HiveUtils and
        // is deliberately not closed here — confirm against HiveUtils.getConn().
        Connection conn = HiveUtils.getConn();
        log.info("hive数据库的conn 是：{}", conn);
        // try-with-resources: the statement was previously leaked on every call
        try (PreparedStatement stmt = conn.prepareStatement("insert into gather_data values(?,?,?)")) {
            stmt.setString(1, UUID.randomUUID().toString());
            stmt.setString(2, getMessage(dbBlockDtoList));
            stmt.setString(3, LocalDateTime.now().format(TIMESTAMP_FORMAT));
            stmt.execute();
        }
    }

    /**
     * Serializes the readings to a JSON string.
     *
     * @param dto the readings to serialize
     * @return JSON representation of the list
     * @throws OpcException if Jackson serialization fails
     */
    private String getMessage(List<DbBlockDto> dto) {
        try {
            return mapper.writeValueAsString(dto);
        } catch (JsonProcessingException ex) {
            throw new OpcException(ex);
        }
    }

    /**
     * Batch-reads the given nodes over OPC UA and maps each result onto a
     * {@link DbBlockDto}. A DTO is emitted for every node; its value is set only
     * when the read status is good and a value is present.
     *
     * @param nodeIds nodes to read; identifiers are expected to look like "t|xxx"
     * @return one DTO per node, in the same order as {@code nodeIds}
     * @throws ExecutionException   if the OPC UA read fails
     * @throws InterruptedException if the calling thread is interrupted
     */
    private List<DbBlockDto> gatherSectionOneData(List<NodeId> nodeIds) throws ExecutionException, InterruptedException {
        List<DbBlockDto> dtoList = new ArrayList<>(nodeIds.size());
        // Shared OPC UA client created elsewhere
        OpcUaClient client = ClientGen.opcUaClient;
        log.info("client is: {}", client);
        // connect() is idempotent on an already-connected client; block until done
        client.connect().get();
        // One batched read for all nodes
        final CompletableFuture<List<DataValue>> data = client.readValues(0d, TimestampsToReturn.Neither, nodeIds);
        List<DataValue> dataValues = data.get();
        if (dataValues == null) {
            return dtoList;
        }
        for (int i = 0; i < dataValues.size(); i++) {
            DbBlockDto dbBlockDto = new DbBlockDto();
            DataValue value = dataValues.get(i);
            // Strip the leading "t|" prefix from the node identifier
            dbBlockDto.setName(nodeIds.get(i).getIdentifier().toString().substring(2));
            // Only keep values with a good status; also guard against null
            // variant/value, which previously could throw an NPE
            if (value.getStatusCode() != null && value.getStatusCode().isGood()
                    && value.getValue() != null && value.getValue().getValue() != null) {
                dbBlockDto.setValue(value.getValue().getValue().toString());
            }
            dtoList.add(dbBlockDto);
        }
        return dtoList;
    }
}
