package com.taimanetworks.kafka;


import com.taimanetworks.kafka.parseTsp.Sourcedata;
import com.taimanetworks.kafka.parseTsp.StructureBeanUtil;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Flink job that consumes raw messages from Kafka, parses each record into a
 * {@link Sourcedata} bean, and writes the result to ClickHouse via a prepared
 * INSERT statement executed by {@code ClickhouseFunction}.
 *
 * <p>Kafka / checkpoint connection settings below are defaults; all of them can
 * be overridden on the command line (see {@link #main(String[])}).
 */
public class Kafka2Clickhouse extends Kafka2TableApp {
    private static final String HADOOP_USER_NAME = "hadoop";
    private static final String CHECKPOINT_NAME = "kafka2hive";
    private static final String KAFKA_SERVERS = "172.22.2.80:9092,172.22.2.81:9092,172.22.2.82:9092";
    private static final String KAFKA_TOPICS = "gac";
    private static final String KAFKA_GROUP_ID = "kafka2ck_test";
    private static final String CHECKPOINT_STORAGE = "hdfs://myha01/flink/kafkapro/checkpoint/";
    // NOTE(review): sink_Path is not referenced anywhere in this class — presumably a
    // leftover from an earlier HDFS/Hive sink variant; confirm before deleting.
    private static final String sink_Path = "hdfs://myha01/user/hive/warehouse/ods/car_signal/";

    /**
     * Entry point invoked by the base class once the environment and source
     * stream are set up; delegates to the ClickHouse sink pipeline.
     *
     * @param tableEnv     Flink table environment (unused here, passed through)
     * @param sourceStream raw message stream read from Kafka
     * @param env          streaming execution environment used to launch the job
     * @throws Exception if job construction or execution fails
     */
    @Override
    public void handler(StreamTableEnvironment tableEnv, DataStreamSource<String> sourceStream, StreamExecutionEnvironment env) throws Exception {
        System.out.println("from rocketmq... sink to clickhouse...");
        sink2clickhouse(tableEnv, sourceStream, env);
    }

    /**
     * Builds and executes the pipeline: parse each raw message into a
     * {@link Sourcedata} bean, drop records that fail to parse, and insert the
     * rest into the ClickHouse {@code car_signal} table.
     *
     * @param tableEnv     Flink table environment (currently unused)
     * @param sourceStream raw message stream from Kafka
     * @param env          execution environment; {@code env.execute} blocks here
     * @throws Exception if the job fails to start or execute
     */
    private void sink2clickhouse(StreamTableEnvironment tableEnv, DataStreamSource<String> sourceStream, StreamExecutionEnvironment env) throws Exception {
        System.out.println("save2clickhouse...");

        System.out.println("解析字段,封装样例类...");
        // Parse fields and wrap each record in a Sourcedata bean; records that
        // StructureBeanUtil cannot parse come back null and are filtered out.
        SingleOutputStreamOperator<Sourcedata> resultBean = sourceStream
                .map(StructureBeanUtil::getSource)
                .filter(Objects::nonNull);

        // Parameterized INSERT executed by ClickhouseFunction (JDBC '?' placeholders,
        // one per column of the car_signal table).
        String sql = "INSERT INTO car_signal (vin,gpss_time,gpss_type,car_statue,recharge_statue,run_statue,vehspeed,milleage,total_voltage,total_current,soc,dc_statue,gears,insulation_resistance,accele_pedal  ,brake_paddle  ,drive_motor_control_temp  ,drive_motor_id  ,drive_motor_speed ,drive_motor_statu ,drive_motor_temp  ,drive_motor_torque ,motor_bus_current ,motor_input_voltage   ,engine_status   ,crankshaft_speed  ,fuel_consume  ,latitude  ,location_status ,longitude ,battery_monomer_max,battery_monomer_min,max_temp,max_temp_probeid,max_temp_systemid ,min_temp  ,min_temp_probeid  ,min_temp_systemid ,voltage_battery_monomerid_max  ,voltage_battery_monomerid_min,voltage_battery_systemid_max  ,voltage_battery_systemid_min  ,battery_cell_lowconsis_alarm ,battery_cell_overpress_alarm,battery_cell_underpress_alarm,battery_hightemp_alarm  ,brake_system_alarm ,common_alarm  ,dc_statu_alarm  ,dc_temp_alarm   ,drive_motor_temp_alarm  ,driver_conttemp_alarm  ,driver_machine_trouble_list  ,driver_machine_trouble_total  ,engine_trouble_list  ,engine_trouble_total,high_pressure_interlock_alrm,high_soc_alarm,insulation_alarm,low_soc_alarm,max_alarm_level,other_trouble_list ,other_trouble_total,recharge_energy ,recharge_fault_list  ,recharge_fault_total  ,soc_hop_alarm   ,temp_diff_alarm ,vehicle_mounted_overpress_alarm  ,vehicle_mounted_storahe_overcharge,vehicle_mounted_underpress_alarm,battery_cell_total,battery_cell_voltage ,recharge_storage_current,recharge_storage_voltage  ,start_battery_cell,start_battery_id,systemid,years,months,days) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
        System.out.println("sql:  " + sql);

        resultBean.addSink(new ClickhouseFunction(sql));

        System.out.println("数据插入。。。");
        // Blocks until the streaming job terminates.
        env.execute("clickhouse sink");

    }

    /**
     * CLI entry point. Reads optional overrides from {@code --servers},
     * {@code --topic}, {@code --group}, {@code --checkpointStorage},
     * {@code --checkpointName} and {@code --hadoopUser}, falling back to the
     * class-level defaults, then starts the job via the base class.
     *
     * @param args Flink-style {@code --key value} arguments
     * @throws Exception if job initialization or execution fails
     */
    public static void main(String[] args) throws Exception {
        Kafka2Clickhouse app = new Kafka2Clickhouse();
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String servers = parameterTool.get("servers", KAFKA_SERVERS);
        String topic = parameterTool.get("topic", KAFKA_TOPICS);
        String group = parameterTool.get("group", KAFKA_GROUP_ID);
        String checkpointStorage = parameterTool.get("checkpointStorage", CHECKPOINT_STORAGE);
        String checkpointName = parameterTool.get("checkpointName", CHECKPOINT_NAME);
        String hadoopUser = parameterTool.get("hadoopUser", HADOOP_USER_NAME);
        app.initAndStart(servers, topic, group, checkpointStorage, checkpointName, hadoopUser);

    }

    /**
     * Generates a random alphanumeric string of the given length, drawing
     * uniformly from {@code [a-zA-Z0-9]}.
     *
     * @param length number of characters to generate; {@code 0} yields ""
     * @return random string of exactly {@code length} characters
     */
    public static String getRandomString(int length) {
        String str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
        Random random = new Random();
        // StringBuilder (unsynchronized) instead of StringBuffer; presized to avoid growth.
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            // Bound derived from the pool itself rather than a hard-coded 62,
            // so the pool can change without silently skewing the distribution.
            sb.append(str.charAt(random.nextInt(str.length())));
        }
        return sb.toString();
    }
}
