package com.lazar.bigdata.zk;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.lazar.bigdata.model.User;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import lombok.Data;
import lombok.ToString;
import org.I0Itec.zkclient.IZkDataListener;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkMarshallingError;
import org.I0Itec.zkclient.serialize.ZkSerializer;

import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * JDBC connection-pool holder whose configuration lives in ZooKeeper.
 *
 * <p>The pool is (re)built from the JSON stored under the {@code /jdbc} node;
 * a watch on that node rebuilds the pool whenever the config changes.
 * Call {@link #initZK()} then {@link #initHikariSource()} before {@link #getUser()}.
 */
public class DBPool {
    // Rebuilt on every config change; null until initHikariSource() first succeeds.
    private static HikariDataSource hikariDataSource;
    private static ZkClient zkClient;
    // ObjectMapper is thread-safe and expensive to create — share one instance.
    private static final ObjectMapper mapper = new ObjectMapper();

    /*
     * 1. Connect to ZooKeeper.
     * 2. Subscribe to the /jdbc node.
     * 3. When its data changes, recreate the connection pool.
     */
    public static void initZK() {
        zkClient = new ZkClient("node51:2181");
        zkClient.setZkSerializer(new ZkStrSerializer());
        zkClient.subscribeDataChanges("/jdbc", new IZkDataListener() {
            @Override
            public void handleDataChange(String s, Object o) throws Exception {
                System.out.println(s + "config is changed,new config is " + o);
                // closePool() instead of a bare close(): the pool may not have been
                // initialized yet when the first change notification arrives.
                closePool();
                initHikariSource();
            }

            @Override
            public void handleDataDeleted(String s) throws Exception {
                System.out.println(s + "is delete");
                closePool();
            }
        });
    }

    /** Closes the current pool if one exists; safe to call before the first init. */
    private static void closePool() {
        if (hikariDataSource != null && !hikariDataSource.isClosed()) {
            hikariDataSource.close();
        }
    }

    /**
     * (Re)creates the Hikari pool from the config stored under /jdbc.
     * Leaves the pool untouched when no usable config could be read.
     */
    public static void initHikariSource() {
        JDBCConfig myConfig = getJDBCConfig();
        if (myConfig == null) {
            // Original code passed null through and crashed in updateHiKariConfig.
            System.out.println("no usable JDBC config under /jdbc; pool not (re)created");
            return;
        }
        updateHiKariConfig(myConfig);
    }

    /** Builds a new HikariDataSource from the given JDBC settings. */
    private static void updateHiKariConfig(JDBCConfig jdbcConfig) {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl(jdbcConfig.getUrl());
        config.setUsername(jdbcConfig.getUser());
        config.setPassword(jdbcConfig.getPass());
        // driverClassName is a HikariConfig property, not a DataSource property:
        // addDataSourceProperty would hand it to the JDBC driver instead.
        config.setDriverClassName("com.mysql.jdbc.Driver");
        config.addDataSourceProperty("cachePrepStmts", "true");
        config.addDataSourceProperty("prepStmtCacheSize", "250");
        config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
        hikariDataSource = new HikariDataSource(config);
    }

    /**
     * Reads and parses the JSON config from the /jdbc node.
     *
     * @return the parsed config, or null when the node is empty or the JSON is invalid
     */
    private static JDBCConfig getJDBCConfig() {
        Object config = zkClient.readData("/jdbc");
        if (config == null) {
            return null; // node exists but carries no data
        }
        try {
            JDBCConfig jdbcConfig = mapper.readValue(config.toString(), JDBCConfig.class);
            System.out.println(jdbcConfig);
            return jdbcConfig;
        } catch (JsonProcessingException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Fetches the user with id 1 — kept for backward compatibility;
     * delegates to {@link #getUser(int)}.
     */
    public static User getUser() {
        return getUser(1);
    }

    /**
     * Loads a single user row by primary key.
     *
     * @param id the users.id value to look up
     * @return a User populated with id and name, or an empty User when the row
     *         is absent or a SQLException occurs (matches the original behavior)
     */
    public static User getUser(int id) {
        User u = new User();
        // try-with-resources: the original never closed the Connection (or the
        // statement/result set), so the pool ran dry after maxPoolSize calls.
        try (Connection conn = hikariDataSource.getConnection();
             PreparedStatement ps =
                     conn.prepareStatement("select id,name,pass from users where id = ?")) {
            ps.setInt(1, id);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    u.setId(rs.getInt(1));
                    u.setName(rs.getString(2));
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return u;
    }

}

/**
 * Serializes ZooKeeper node data as plain UTF-8 strings, so the JSON config
 * under /jdbc round-trips as text rather than Java-serialized bytes.
 */
class ZkStrSerializer implements ZkSerializer {

    @Override
    public byte[] serialize(Object o) throws ZkMarshallingError {
        // Pin UTF-8: the no-arg getBytes() uses the platform default charset,
        // which can differ between the process that wrote the node and this reader.
        return String.valueOf(o).getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public Object deserialize(byte[] bytes) throws ZkMarshallingError {
        return new String(bytes, StandardCharsets.UTF_8);
    }
}

@Data
@ToString
class JDBCConfig {
    private String url;
    private String user;
    private String pass;
}