package flink.scene;

//import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
//import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
//import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
//import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
//import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import com.google.gson.Gson;
import com.google.gson.internal.LinkedTreeMap;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.ververica.cdc.debezium.DebeziumSourceFunction;
import com.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.HashMap;

public class FromMysqlToClickhouse {

    // Formats each change event as: {"database":"test","data":{"name":"jacky","description":"fffff","id":8},"type":"insert","table":"test_cdc"}
    public static class JsonDebeziumDeserializationSchema implements DebeziumDeserializationSchema {
           public void deserialize(SourceRecord sourceRecord, Collector collector)throws Exception{
                 System.out.println("deserialize");
                 Gson jsstr = new Gson();
                 HashMap<String,Object> hs = new HashMap<String,Object>();

                 String topic = sourceRecord.topic();
                 String[] split = topic.split("[.]");
                 String database = split[1];
                 String table = split[2];
                 hs.put("database",database);
                 hs.put("table",table);
                 //获取操作类型
                 Envelope.Operation operation=Envelope.operationFor(sourceRecord);
                 //获取数据本身
                 Struct struct = (Struct)sourceRecord.value();
                 Struct after = struct.getStruct("after");

                 if(after!=null) {
                       Schema schema = after.schema();
                       HashMap<String,Object> afhs = new HashMap<String,Object>();
                       for(Field field:schema.fields()) {
                             afhs.put(field.name(),after.get(field.name()));
                           }
                       hs.put("data",afhs);
                 }

                 String type = operation.toString().toLowerCase();
                 if("create".equals(type)) {
                       type="insert";
                 }
                 hs.put("type",type);

                 collector.collect(jsstr.toJson(hs));
           }

           public TypeInformation<String> getProducedType() {
                 return BasicTypeInfo.STRING_TYPE_INFO;
           }
    }

    public static class ClickhouseSink extends RichSinkFunction<String> {
           Connection connection;
           PreparedStatement pstmt;
           private Connection getConnection() {
                 System.out.println("getConnection");
                 Connection conn = null;
                 try{
                       Class.forName("ru.yandex.clickhouse.ClickHouseDriver");
                       String url="jdbc:clickhouse://192.168.10.134:8123/default";
                       conn = DriverManager.getConnection(url,"root","root");

                     }catch(Exception e) {
                       e.printStackTrace();
                     }
                 return conn;
           }

           public void open(Configuration parameters)throws Exception{
               System.out.println("open");
                 super.open(parameters);
                 connection = getConnection();
                 String sql="INSERT INTO default.appointment_ore_info (ORE_APPOIN_ID,PARK_ID,PLATE_NUM,TRANSPORT_TYPE,CUSTOMER_ID,CUSTOMER_NAME,PHONE_NUM,TONNAGE_NUM,SUPPLIER_ID,ORE_TYPE,ORE_NAME,DEPARTURE_PLACE,STO_YARD_ID,CAR_LENGTH,STOCKYARD_NAME,VEHICLE_STS,CREATE_DATE,CREATER_ID,STS,STS_DATE,MODIFIER_ID,DELIVERY_PIC,PROD_ORE_RELATION_ID,CARRIER_ID,DEPT_ID,SHIPPING_ADDRESS,DEPT_NAME,WORK_TYPE) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
                 pstmt=connection.prepareStatement(sql);
               }

           // 每条记录插入时调用一次
           public void invoke(String value,Context context)throws Exception{
                 //{"database":"test","data":{"name":"jacky","description":"fffff","id":8},"type":"insert","table":"test_cdc"}
                 System.out.println("invoke");
                 Gson t = new Gson();
                 HashMap<String,Object> hs = t.fromJson(value,HashMap.class);
                 String database=(String)hs.get("database");
                 String table=(String)hs.get("table");
                 String type=(String)hs.get("type");

                 if("default".equals(database) && "appointment_ore_info".equals(table)){
                       if("insert".equals(type)){
                             LinkedTreeMap<String,Object> data=(LinkedTreeMap<String,Object>)hs.get("data");
                             String name=(String)data.get("name");
                             String description=(String)data.get("description");
                             Double id=(Double)data.get("id");
                             // 未前面的占位符赋值
//                             pstmt.setInt(1,id.intValue());
//                             pstmt.setString(2,name);
//                             pstmt.setString(3,description);
//
//                             pstmt.executeUpdate();
                           }
                 }
           }

           public void close()throws Exception{
               System.out.println("close");
                 super.close();
                 if(pstmt!=null) {
                       pstmt.close();
                 }
                 if(connection!=null) {
                       connection.close();
                 }
           }
    }
    
    /**
     * Wires the pipeline together: MySQL CDC source → print → ClickHouse sink.
     * Runs with parallelism 1 and no restart strategy, so a failure stops the
     * job instead of looping.
     */
    public static void main(String[] args) throws Exception {
        try {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.setParallelism(1);
            env.setRestartStrategy(RestartStrategies.noRestart());

            DebeziumSourceFunction<String> sourceFunction = com.ververica.cdc.connectors.mysql.MySqlSource.<String>builder()
                    .hostname("127.0.0.1")
                    .port(3306)
                    .username("root")
                    .password("root")
                    // The original set "Asia/Shanghai" and then "UTC"; the
                    // last builder call wins, so "UTC" was the effective
                    // value and is kept here — confirm which zone the MySQL
                    // server actually uses.
                    .serverTimeZone("UTC")
                    .databaseList("smart_park_ex")
                    // tableList entries must be database-qualified
                    // ("db.table"); a bare table name matches nothing.
                    .tableList("smart_park_ex.appointment_ore_info")
                    .deserializer(new JsonDebeziumDeserializationSchema()) // custom JSON result format
                    .startupOptions(StartupOptions.initial()) // full snapshot first, then binlog
                    .build();

            DataStreamSource<String> source = env.addSource(sourceFunction);
            source.name("获取mysql数据");
            source.print();
            source.name("插入clickhouse").addSink(new ClickhouseSink());
            env.execute("flink-mysql-cdc:clickhouse");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
