package com.zyh.rcm.risk;

import com.zyh.em.entity.EvaluateData;
import com.zyh.em.entity.EvaluateReport;
import com.zyh.em.entity.HistoryData;
import com.zyh.em.evaluate.*;
import com.zyh.em.update.*;
import com.zyh.em.util.ParseLogUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.DateTimeBucketAssigner;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Objects;
import java.util.Properties;

/**
 * Streaming login-risk evaluation job.
 *
 * Pipeline:
 *  1. consume raw log lines from Kafka (topic {@code topic-flink});
 *  2. keep only login-success and evaluate logs, key them by "appName + username";
 *  3. run {@link LoginEvaluateMapFunction} (keyed state) to update per-user history
 *     and produce {@link EvaluateReport}s for evaluate logs;
 *  4. flatten each report to a text line and write it to HDFS via a row-format
 *     {@link StreamingFileSink} bucketed by month.
 */
public class LoginRiskJob {
    public static void main(String[] args) throws Exception {
        // The HDFS sink authenticates with this user name.
        System.setProperty("HADOOP_USER_NAME", "root");

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        /*
         * Kafka consumer configuration:
         *  - bootstrap servers (ip:port)
         *  - consumer group id
         *  - topic
         *  - plain-string deserialization of each record
         */
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka24:9092");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink");
        String topicName = "topic-flink";
        FlinkKafkaConsumer<String> flinkKafkaConsumer =
                new FlinkKafkaConsumer<>(topicName, new SimpleStringSchema(), properties);
        DataStreamSource<String> dataStreamSource = environment.addSource(flinkKafkaConsumer);

        // Keep only the two log kinds the job understands, then key by the combined
        // app-name + username so each user's history state is co-located with its
        // evaluation in LoginEvaluateMapFunction.
        KeyedStream<Tuple2<String, String>, String> keyedStream = dataStreamSource
                .filter(log -> ParseLogUtil.isLoginSuccessLog(log) || ParseLogUtil.isEvaluateLog(log))
                .map(new MapFunction<String, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> map(String log) throws Exception {
                        String appNameAndUsername = ParseLogUtil.parseLog2AppNameAndUsername(log);
                        return Tuple2.of(appNameAndUsername, log);
                    }
                })
                .keyBy(t -> t.f0);

        // LoginEvaluateMapFunction returns null for update-only (login-success)
        // records; drop those so only real evaluation reports flow downstream.
        SingleOutputStreamOperator<EvaluateReport> result = keyedStream
                .map(new LoginEvaluateMapFunction())
                .filter(Objects::nonNull);

        result.print();

        /*
         * Row-format HDFS sink:
         *  1. basePath — output root directory
         *  2. encoder  — plain-text rows
         *  3. bucket assigner — one sub-directory per month ("yyyy-MM")
         */
        Path basePath = new Path("hdfs://hadoop10:9000/final-project");
        SimpleStringEncoder<String> encoder = new SimpleStringEncoder<>();
        DateTimeBucketAssigner<String> bucketAssigner = new DateTimeBucketAssigner<>("yyyy-MM");

        StreamingFileSink<String> fileSink = StreamingFileSink.forRowFormat(basePath, encoder)
                .withBucketAssigner(bucketAssigner)
                .build();

        // Flatten each EvaluateReport into a single space-separated text line
        // (longitude:latitude joined by ':'; the six risk factors in fixed order)
        // before handing it to the file sink.
        result.map(new MapFunction<EvaluateReport, String>() {
            @Override
            public String map(EvaluateReport report) throws Exception {
                return new StringBuilder()
                        .append(report.getTime()).append(" ")
                        .append(report.getAppName()).append(" ")
                        .append(report.getUsername()).append(" ")
                        .append(report.getUuid()).append(" ")
                        .append(report.getCity()).append(" ")
                        .append(report.getGeoPoint().getLongitude()).append(":")
                        .append(report.getGeoPoint().getLatitude()).append(" ")
                        .append(report.getRiskFactor().get("city")).append(" ")
                        .append(report.getRiskFactor().get("device")).append(" ")
                        .append(report.getRiskFactor().get("habit")).append(" ")
                        .append(report.getRiskFactor().get("inputFeatures")).append(" ")
                        .append(report.getRiskFactor().get("ordernessPassword")).append(" ")
                        .append(report.getRiskFactor().get("speed"))
                        .toString();
            }
        }).addSink(fileSink);

        environment.execute("loginEvaluateJob");
    }
}

/**
 * Keyed map function that maintains per-user login history and evaluates login risk.
 *
 * Input is ("appName+username" key, raw log line). Login-success logs update the
 * {@link HistoryData} state and yield {@code null}; evaluate logs run the evaluation
 * chain against that history and yield an {@link EvaluateReport} (also cached as JSON
 * in queryable MapState, keyed by the report uuid).
 */
class LoginEvaluateMapFunction extends RichMapFunction<Tuple2<String, String>, EvaluateReport> {
    // Per-user login history (city, devices, habits, last login time/location, ...).
    private ValueState<HistoryData> historyDataState;
    // uuid -> evaluation-report JSON, exposed via Flink queryable state.
    private MapState<String, String> evaluateReportState;
    // Reused JSON serializer. ObjectMapper is expensive to construct, so build it once
    // per task in open() instead of once per record. transient: the function instance
    // is serialized when the job is submitted and ObjectMapper is not Serializable.
    private transient ObjectMapper objectMapper;

    @Override
    public void open(Configuration parameters) throws Exception {
        RuntimeContext context = getRuntimeContext();

        ValueStateDescriptor<HistoryData> vsd = new ValueStateDescriptor<>("vsd", Types.POJO(HistoryData.class));
        MapStateDescriptor<String, String> msd = new MapStateDescriptor<>("msd", Types.STRING, Types.STRING);
        msd.setQueryable("evaluateReportState"); // enable external state queries under this name
        historyDataState = context.getState(vsd);
        evaluateReportState = context.getMapState(msd);
        objectMapper = new ObjectMapper();
    }

    @Override
    public EvaluateReport map(Tuple2<String, String> value) throws Exception {
        String log = value.f1; // raw log line; f0 is the key and is not needed here

        // Lazily initialize the history on the first record for this key.
        HistoryData historyData = historyDataState.value();
        if (historyData == null) {
            historyData = new HistoryData();
        }

        // NOTE(review): both chains are rebuilt per record; if UpdateChain/EvaluateChain
        // are stateless they could be hoisted to open() — confirm before changing.
        if (ParseLogUtil.isLoginSuccessLog(log)) {
            // Login-success log: fold this login into the per-user history via the
            // update chain, persist the state, and emit nothing.
            UpdateChain updateChain = new UpdateChain();
            updateChain.addUpdates(new CityUpdate(), new DeviceUpdate(10), new HabitUpdate(),
                    new InputFeaturesUpdate(10), new OrdernessPasswordUpdate(),
                    new LastLoginTimeUpdate(), new LastLoginGeoPointUpdate());

            updateChain.doUpdate(ParseLogUtil.parseLog2LoginSuccessData(log), historyData);
            historyDataState.update(historyData);
            return null;
        }

        // Evaluate log: score the attempt against the accumulated history to
        // produce a risk report.
        EvaluateChain evaluateChain = new EvaluateChain();
        evaluateChain.addEvaluate(new CItyEvaluate(), new DeviceEvaluate(), new HabitEvaluate(10),
                new InputFeatureEvaluate(), new OrdernessPasswordEvaluate(0.9), new SpeedEvaluate(750));

        EvaluateData evaluateData = ParseLogUtil.parseLog2EvaluateData(log);
        EvaluateReport evaluateReport = ParseLogUtil.evaluateData2EvaluateReport(evaluateData);

        evaluateChain.doEvaluate(evaluateData, historyData, evaluateReport);

        // Cache the report as JSON in queryable MapState, then return it downstream.
        String json = objectMapper.writeValueAsString(evaluateReport);
        evaluateReportState.put(evaluateReport.getUuid(), json);

        return evaluateReport;
    }
}