package com.zhr.rhdb.etl;

import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.io.FileUtil;
import cn.hutool.json.JSONUtil;
import com.smartframework.web.BaseRunner;
import com.smartframework.web.core.Constants;
import com.smartframework.web.core.http.RestClient;
import com.smartframework.web.core.util.*;

import java.util.*;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.stream.Collectors;

/**
 * @author Java Liu
 */
public class DataCleanMain {
    /**
     * One-shot ETL entry point: pulls SCADA point history from the RHDB proxy for a
     * date range, computes each point's consumption delta (last value - first value),
     * spreads it evenly across the days in the range, and writes a cleanup SQL script
     * (per-point DELETE + daily INSERTs into ami_da_raw_data) to {@code rhdb-clean.sql}.
     *
     * @param args args[0] = start date (yyyy-MM-dd, inclusive),
     *             args[1] = end date (yyyy-MM-dd, exclusive)
     * @throws Exception on any initialization, DB, HTTP, or file-write failure
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            System.out.println("required two date parameter,like:\njava -jar rhdb-etl.jar 2023-02-01 2023-02-07");
            return;
        }
        BaseRunner.init(DataCleanMain.class, args);
        // Widen the query window (-2h before start, +1h after end) so the boundary
        // samples are guaranteed to fall inside the history query.
        final DateTime startDate = DateUtil.parseDateTime(String.format("%s 00:00:00", args[0])).offset(DateField.HOUR_OF_DAY, -2);
        final DateTime endDate = DateUtil.parseDateTime(String.format("%s 00:00:00", args[1])).offset(DateField.HOUR_OF_DAY, 1);
        System.out.println("startTime:" + startDate.getTime() + ",endTime:" + endDate.getTime() + ",interval:" + ((endDate.getTime() - startDate.getTime()) / 1000));
        SimpleJdbcClient ipowerClient = SimpleJdbcClient.create("ipower");
        // SCADA-sourced points (data_source=0) of type 1/2 over protocol 7/8, with a real mped_id.
        List<Map<String, Object>> pntList = ipowerClient.queryForList("select a.id measure_point_id,a.name measure_point_name,b.mped_id scada_point_id,a.statistic_type,a.data_type_id,b.protocol_type from ami_ba_measure_point a left join ami_ba_meter b on a.meter_id = b.id where a.data_source=0 and (a.data_type_id=1 or a.data_type_id=2) and (b.protocol_type=7 or b.protocol_type=8) and mped_id !='-1'");
        // NOTE(review): SQL text is assembled with String.format. Inputs are CLI dates,
        // not end-user data, but a PreparedStatement-style template would be safer.
        final String deleteSql = String.format("delete from ami_da_raw_data where measure_point_id = %%s and save_time>'%s 00:00:00' and save_time<'%s 00:00:00';", args[0], args[1]);
        final String insertSql = "insert into ami_da_raw_data(id,measure_point_id, data_type_id, cycle, data_time, sub_serial, raw_value, raw_value_flag, ratio, cal_value, valid_flag, add_value,  save_time, collector_id, is_deleted, insert_time) values (%s,%s, %s, 15, '%s 12:44:18', 20, %s,1, 1, 0, 1, 0,  '%s 12:44:18', 'ami-processor-scada-etl', 0, null);";
        List<String> sqlList = new ArrayList<>();
        List<String> tagNames = new ArrayList<>();
        // RHDB tag name -> measure point id / data_type_id lookups used when emitting SQL.
        Map<String, String> scadaPntWithMeasurePntIdMap = new HashMap<>();
        Map<String, Object> scadaPntWithMeasurePntTypeMap = new HashMap<>();
        pntList.forEach(row -> {
            // NOTE(review): keys assume SimpleJdbcClient camel-cases column aliases
            // (measure_point_id -> measurePointId) — confirm against the client implementation.
            String measurePointId = String.valueOf(row.get("measurePointId"));
            String scadaPointId = String.valueOf(row.get("scadaPointId"));
            if (ParameterUtils.isEmptyOrNull(measurePointId)) {
                return;
            }
            // Map the SCADA point id onto its RHDB tag name; 1101* = accumulator,
            // 1102* = analog. Any other prefix is skipped.
            String tagName;
            if (scadaPointId.startsWith("1101")) {
                tagName = String.format("scada.pnt_acc.%s.value", scadaPointId);
            } else if (scadaPointId.startsWith("1102")) {
                tagName = String.format("scada.pnt_ana.%s.value", scadaPointId);
            } else {
                return;
            }
            tagNames.add(tagName);
            scadaPntWithMeasurePntIdMap.put(tagName, measurePointId);
            scadaPntWithMeasurePntTypeMap.put(tagName, row.get("dataTypeId"));
        });
        System.out.println(String.format("there are:%d points to be sync", tagNames.size()));
        Map<String, Object> requestBody = new HashMap<>();
        requestBody.put("tagNames", tagNames);
        requestBody.put("startTime", startDate.getTime());
        requestBody.put("endTime", endDate.getTime());
        requestBody.put("interval", endDate.getTime() - startDate.getTime());
        requestBody.put("cmode", "RHDB_STA_NORMAL");
        RestClient rhdbClient = new RestClient(Constants.getString("rhdb.proxy.url"));
        // Fetch raw history for all tags in a single call to the RHDB proxy.
        Object data = rhdbClient.post("/pnt/history.do", new HashMap<String, Object>() {{
            put("data", requestBody);
        }}).getContentMapResult().get("data");
        if (ParameterUtils.isEmptyOrNull(data)) {
            System.out.println("rhdb result is empty!");
            return;
        }
        System.out.println(JSONUtil.toJsonPrettyStr(data));

        List<PointValue> pointValues = JsonUtil.getBeanListByJsonArray(JsonUtil.getJsonByObject(data), PointValue.class);
        System.out.println("pnt result for tagName count :" + tagNames.size() + ", normal value count:" + pointValues.size());

        // One DELETE per mapped point, clearing the target window before re-insert.
        scadaPntWithMeasurePntIdMap.forEach((tagName, measurePointId) -> sqlList.add(String.format(deleteSql, measurePointId)));

        // Whatever is left in tagNames afterwards returned no data — reported at the end.
        pointValues.forEach(pointValue -> tagNames.remove(pointValue.getPointId()));
        System.out.println("pnt error tagNames:" + tagNames);
        Map<String, List<PointValue>> pointIdValueListMap = pointValues.stream().collect(Collectors.groupingBy(PointValue::getPointId, LinkedHashMap::new, Collectors.toList()));
        // Consumption over the window = last sample minus first sample, per tag.
        Map<String, Double> pntDiffValueMap = new HashMap<>();
        pointIdValueListMap.forEach((tagName, values) -> {
            if (values.size() > 1) {
                pntDiffValueMap.put(tagName, values.get(values.size() - 1).getValue() - values.get(0).getValue());
            }
        });
        //计算天数 (day count) — measured from the raw start date because startDate
        // was shifted back 2 hours above.
        final DateTime dateTime = DateUtil.parse(args[0]);
        long totalCount = DateUtil.between(dateTime, endDate, DateUnit.DAY);
        if (totalCount <= 0) {
            // Guard: with a zero/negative day span no INSERTs would be generated, yet the
            // DELETE statements above are already queued — writing the script would produce
            // a delete-only file that destroys data. Abort instead.
            System.out.println("invalid date range, day count is " + totalCount + ", refusing to write delete-only script.");
            return;
        }

        // Spread each point's total consumption evenly across the days of the range.
        pntDiffValueMap.forEach((tagName, diff) -> {
            System.out.println("pnt:" + tagName + ", has consume:" + diff);
            String everyDayValue = String.valueOf(diff / totalCount);
            for (int i = 0; i < totalCount; i++) {
                sqlList.add(String.format(insertSql,
                        SnowFlakeIdUtil.getInstance().nextId(),
                        scadaPntWithMeasurePntIdMap.get(tagName),
                        scadaPntWithMeasurePntTypeMap.get(tagName),//设置data_type_id (set data_type_id)
                        DateUtil.offset(dateTime, DateField.DAY_OF_YEAR, i).toDateStr(),
                        everyDayValue,
                        DateUtil.offset(dateTime, DateField.DAY_OF_YEAR, i).toDateStr()));
            }
        });
        sqlList.add(String.format("-- measure point size:%d,every point %d row inserts.", pntDiffValueMap.size(), totalCount));
        // Trailing comments list the tags that returned no data from RHDB.
        tagNames.forEach(tagName -> sqlList.add(String.format("-- %s", tagName)));
        FileUtil.writeUtf8Lines(sqlList, Constants.makeFullPath("rhdb-clean.sql"));
    }
}
