package com.example.config;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSON;
import com.example.constant.CommonConstant;
import com.example.mapper.ResumeMapper;
import com.example.pojo.MySQLConfig;
import org.I0Itec.zkclient.IZkDataListener;
import org.I0Itec.zkclient.ZkClient;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.ibatis.transaction.TransactionFactory;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class ConnectionPool implements IZkDataListener {

    private static final Logger logger = LoggerFactory.getLogger(ConnectionPool.class);

    // Current MySQL connection settings, read from the ZooKeeper config node
    // (or seeded from CommonConstant defaults when the node does not exist).
    private MySQLConfig mySQLConfig;

    // ZooKeeper client used both to read the config node and to watch it for changes.
    private static ZkClient zkClient = new ZkClient("127.0.0.1:2181", 3000);

    // Shared pool instance; replaced wholesale when the ZK config changes.
    private static DruidDataSource druidDataSource = new DruidDataSource();

    private static SqlSessionFactory sessionFactory;

    private static SqlSession sqlSession;

    private static ResumeMapper mapper;

    // Ensures the data-change listener is registered exactly once. The original
    // code subscribed a fresh `new ConnectionPool()` on EVERY init() call (and
    // init() runs on every dataSource() call), accumulating duplicate listeners
    // that were not Spring-managed.
    private static volatile boolean subscribed = false;

    @Autowired
    private MyAware myAware;

    /**
     * Configures and returns the shared Druid data source using the settings
     * resolved by {@link #init()} (from ZooKeeper, or the built-in defaults).
     *
     * @return the configured {@link DruidDataSource}
     */
    public DruidDataSource dataSource() {
        logger.info("init() ..........................Before   ................");
        init();
        logger.info("init() ..........................After    ................");
        // The driver class is part of the stored config (see init()) but was
        // previously never applied to the pool.
        druidDataSource.setDriverClassName(mySQLConfig.getDriver());
        druidDataSource.setUrl(mySQLConfig.getUrl());
        druidDataSource.setUsername(mySQLConfig.getUsername());
        druidDataSource.setPassword(mySQLConfig.getPassword());
        return druidDataSource;
    }

    /**
     * Resolves {@link #mySQLConfig}: reads it from the ZooKeeper node when the
     * node exists, otherwise seeds the node with the defaults from
     * {@link CommonConstant}. Also registers this instance (once) as the
     * data-change listener on that node. Any failure is logged, not rethrown.
     */
    private void init() {
        logger.info("init ................................");
        try {
            logger.info("zkClient ===>>>{}", zkClient);
            if (zkClient.exists(CommonConstant.rootPath)) {
                Object data = zkClient.readData(CommonConstant.rootPath, new Stat());
                logger.info("zk中存储的配置信息为:{}", data);
                mySQLConfig = JSON.parseObject(data.toString(), MySQLConfig.class);
            } else {
                // BUG FIX: mySQLConfig was never instantiated here, so the
                // original code threw a NullPointerException on first run
                // against an empty ZooKeeper.
                mySQLConfig = new MySQLConfig();
                mySQLConfig.setDriver(CommonConstant.driver);
                mySQLConfig.setUrl(CommonConstant.springbootdataUrl);
                mySQLConfig.setUsername(CommonConstant.username);
                mySQLConfig.setPassword(CommonConstant.password);
                zkClient.createPersistent(CommonConstant.rootPath, JSON.toJSONString(mySQLConfig));
            }
            if (!subscribed) {
                logger.info("开始订阅节点数据变更监听器");
                // Subscribe THIS (Spring-managed) instance, not a fresh one,
                // and only once.
                zkClient.subscribeDataChanges(CommonConstant.rootPath, this);
                subscribed = true;
            }
            logger.info("解析后的配置为:{}", mySQLConfig);
        } catch (Exception e) {
            // Pass the exception as the throwable argument so the stack trace
            // is preserved (string concatenation dropped it before).
            logger.error("failed to initialize MySQL config from ZooKeeper", e);
        }
    }

    /**
     * Builds a MyBatis environment backed by the current data source and
     * returns a {@link ResumeMapper}. The session is opened with auto-commit.
     *
     * NOTE(review): each call rebuilds the factory and opens a new session
     * that is never closed; callers hold mappers bound to that session, so
     * closing it eagerly here would break them — revisit the session
     * lifecycle if leaks become a problem.
     *
     * @return a mapper bound to a fresh auto-commit session
     */
    public ResumeMapper getMapper() {
        TransactionFactory transactionFactory = new JdbcTransactionFactory();
        Environment environment = new Environment("conn", transactionFactory, this.dataSource());
        Configuration configuration = new Configuration(environment);
        configuration.addMapper(ResumeMapper.class);
        sessionFactory = new SqlSessionFactoryBuilder().build(configuration);
        sqlSession = sessionFactory.openSession(true);
        return sqlSession.getMapper(ResumeMapper.class);
    }

    /**
     * ZooKeeper data-change callback: the config node was updated, so rebuild
     * the connection pool with the freshly stored settings.
     *
     * @param s path of the changed node
     * @param o new node data (re-read via init() rather than used directly)
     */
    @Override
    public void handleDataChange(String s, Object o) throws Exception {
        logger.info("监听zk节点事件...... ");
        logger.info("o : {}", o);
        logger.info("配置节点上的数据有更新,读取出来,重新配置连接池");
        // Close the old pool before discarding it — the original leaked every
        // previous pool's physical connections on each config change.
        DruidDataSource old = druidDataSource;
        druidDataSource = new DruidDataSource();
        if (old != null) {
            old.close();
        }
        dataSource();
    }

    /**
     * ZooKeeper node-deleted callback; currently only logged.
     *
     * @param s path of the deleted node
     */
    @Override
    public void handleDataDeleted(String s) throws Exception {
        logger.info("监听zk节点删除事件...... ");
    }
}
