package org.example.template;


import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.example.ch.InitKafka;
import org.example.ch.getProperties;
import org.example.dev.GeneralDataUtils;
import org.example.dev.SummaryModeResults;
import org.example.develop.StringUtilsUDF;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;


/**
 * Class description: model/feature development for the HXD2 locomotive type's
 * traction converter (牵引变流器, component code PJ00003).
 *
 * <p>Spark Streaming job: consumes TCMS real-time fault records from Kafka in
 * 15-second micro-batches, joins them with the train-attribute dimension wide
 * table, and writes fault / condition-based-maintenance / health-evaluation
 * results via {@code GeneralDataUtils}.
 *
 * @author tiancy
 * @version 1.0
 * @since 2023/9/15
 */
public class QyblqHxd2Print {
    // Shared Spark handles, initialized in main(). Kept public static because the
    // anonymous foreachRDD function below captures `spark` from this class.
    public static SparkSession spark = null;
    public static JavaSparkContext sc = null;

    /**
     * Job entry point: builds the Spark/Streaming contexts, registers UDFs,
     * prepares the train-attribute dimension table, then starts the Kafka
     * direct stream and blocks until termination.
     *
     * @param args unused command-line arguments
     * @throws IOException if reading the Kafka/topic configuration files fails
     */
    public static void main(String[] args) throws IOException {
        SparkConf conf = new SparkConf()
                .setMaster("local[*]") // run mode: local with all available cores
                .set("spark.executor.memory", "6g") // executor memory size
                .setAppName("HXD2_QYBLQ_MX")
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .set("spark.driver.allowMultipleContexts", "true")
                .set("spark.hadoop.validateOutputSpecs", "false")
                .set("hive.mapred.supports.subdirectories", "true")
                .set("spark.streaming.backpressure.enabled", "true")
                .set("spark.streaming.kafka.maxRatePerPartition", "60")
                .set("spark.default.parallelism", "100")
                .set("spark.sql.shuffle.partitions", "20")
                .set("mapreduce.input.fileinputformat.input.dir.recursive", "true")
                .set("spark.scheduler.listenerbus.eventqueue.capacity", "100000")
                .set("spark.streaming.kafka.consumer.cache.enabled", "false")
                .set("spark.debug.maxToStringFields", "100");
        sc = new JavaSparkContext(conf);
        sc.setLogLevel("ERROR");
        // 15-second micro-batch interval for the streaming context.
        JavaStreamingContext jsc = new JavaStreamingContext(sc, Durations.seconds(15));
        // NOTE(review): master here is "local" (single core) while the SparkConf above
        // uses "local[*]" — confirm which one is intended; the builder setting wins
        // for this SparkSession.
        spark = SparkSession.builder().master("local").appName("hxd2_qyblq").getOrCreate();

        // Register SQL UDFs used by the downstream GeneralDataUtils queries.
        spark.udf().register("distinctString", new UDF1<String, String>() {
            @Override
            public String call(String input) {
                return StringUtilsUDF.distinctString(input);
            }
        }, DataTypes.StringType);
        spark.udf().register("SummaryModeResults", new SummaryModeResults(), DataTypes.StringType);

        // Generic helper: builds the base-info wide table for all trains, registered
        // as temp view ps_gxzz_jcll_zx_jxxx_all_dim. Restricted here to HXD2 /
        // component PJ00003 (the commented-out variant covers additional HXD types).
//        GeneralDataUtils.getTrainAttributesDim(spark, "in ('HXD1C','HXD1D','HXD2','HXD3','HXD3C')", "in ('PJ00003')");
        GeneralDataUtils.getTrainAttributesDim(spark, "in ('HXD2')", "in ('PJ00003')");

        // Read Kafka connection parameters from the configuration file.
        HashMap<String, Object> kafkaParams = InitKafka.init("HXD2 QYBLQ");

        // Resolve the (possibly comma-separated) topic list from config.properties.
        String tcds_tcms_fault_topic = getProperties.getPropertiesKey("tcds_tcms_bd_topic1", "config.properties");
        final HashSet<String> topics = new HashSet<>(Arrays.asList(tcds_tcms_fault_topic.split(",")));
        // Create the direct Kafka stream over the configured topics.
        JavaInputDStream<ConsumerRecord<String, String>> directStream = KafkaUtils.createDirectStream(
                jsc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams)
        );
        // Project each Kafka record down to its String value (the message payload).
        JavaDStream<String> lineValue = directStream.map(new Function<ConsumerRecord<String, String>, String>() {
            @Override
            public String call(ConsumerRecord<String, String> stringStringConsumerRecord) throws Exception {
                return stringStringConsumerRecord.value();
            }
        });

        lineValue.foreachRDD(new VoidFunction<JavaRDD<String>>() {

            @Override
            public void call(JavaRDD<String> stringJavaRDD) throws Exception {
                if (!stringJavaRDD.isEmpty()) {
                    /*
                        TODO: after each batch is consumed, query the train types/numbers
                             that already have health-evaluation results and, before
                             processing this batch, determine whether a train has gone
                             out-of-depot -> back for servicing -> out again. If it has
                             left the depot again, reset its evaluation result in
                             parts_label to 'A'.
                    */

                    // Build the real-time fault wide table from the Kafka batch.
                    // IMPORTANT: processRealTimeData may be called only ONCE per
                    // foreachRDD; pass all component codes (PJM) up front and filter
                    // per-component datasets out of the resulting wide table afterwards.
                    Dataset<Row> tcms_jcll_allDataSet = GeneralDataUtils.processRealTimeData(spark, stringJavaRDD, " in ('HXD2') ", "in ('PJ00003')", "and phm_lb != '-' and phm_lb is not null and phm_lb != ''");
                    // Write ordinary real-time faults to ads_phm_fault and the
                    // condition-based-maintenance store; the returned evaluation is a
                    // per-position, per-model health score.
                    GeneralDataUtils.commonAdsAndSqwxAndEvaluate(spark, tcms_jcll_allDataSet, "in ('HXD2')", "in ('PJ00003')");
                    // From the temp table produced by getTrainAttributesDim, extract the
                    // base info of every train and component and persist to parts_label.
                    GeneralDataUtils.writePartsLabel(spark, "in ('HXD2')", "in ('PJ00003')");
                } else {
                    System.out.println(" ++++++++++ 当前批次中没有数据");
                }

            }
        });


        // TODO: add a method handling the special-alarm logic (component faults
        // derived from combinations of events within a 5-second window).

        jsc.start();
        try {
            jsc.awaitTermination();
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers / the JVM shutdown path can
            // still observe the interruption instead of it being silently swallowed.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        spark.stop();
    }
}
