package com.mai.realtime.function;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.mai.realtime.common.Constant;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Collections;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * Async dimension-enrichment function: for each stream element, looks up its
 * dimension row — Redis cache first, then Phoenix on a cache miss — and merges
 * it into the element via {@link #addDim} before completing the async result.
 *
 * <p>Subclasses supply the dimension table name ({@link #getTable}), the row
 * key extracted from the element ({@link #getId}), and the merge logic
 * ({@link #addDim}). Blocking JDBC/Redis I/O runs on a private thread pool so
 * the async operator thread is never blocked.
 *
 * @param <T> the stream element type; enriched in place and emitted unchanged
 * @Creator: LBG
 * @CreateTime: 2022-09-01  21:42
 */
public abstract class DimFatchFunction<T> extends RichAsyncFunction<T, T> {

    // TTL for a cached dimension row in Redis (seconds).
    private static final int DIM_CACHE_TTL_SECONDS = 30 * 60;

    // All runtime state is built in open(); transient so the serialized
    // function shipped to task managers carries no live pools.
    private transient ThreadPoolExecutor threadP;
    private transient DruidDataSource druidDataSource;
    private transient JedisPool jedisPool;

    @Override
    public void open(Configuration parameters) throws Exception {
        // Thread pool shared by all in-flight async lookups of this subtask.
        threadP = new ThreadPoolExecutor(50, 100, 30 * 60, TimeUnit.SECONDS, new LinkedBlockingQueue<>());

        // Druid connection pool for Phoenix.
        druidDataSource = new DruidDataSource();
        druidDataSource.setDriverClassName(Constant.PHOENIX_DRIVER);
        druidDataSource.setUrl(Constant.PHOENIX_URL);
        // Pool sizing: 5 connections up front, at most 20 active, keep >= 1 idle.
        druidDataSource.setInitialSize(5);
        druidDataSource.setMaxActive(20);
        druidDataSource.setMinIdle(1);
        // -1: wait indefinitely for a free connection instead of failing fast.
        druidDataSource.setMaxWait(-1);
        // Probe used by the idle evictor; required when testWhileIdle is true.
        druidDataSource.setValidationQuery("select 1");
        druidDataSource.setTestWhileIdle(true);
        // Fix: false per the original intent — validating on every borrow adds a
        // round trip per lookup; idle-time validation above already covers health.
        druidDataSource.setTestOnBorrow(false);
        druidDataSource.setTestOnReturn(false);
        // Evictor runs every 30 s; connections idle >= 30 min are reclaimed.
        druidDataSource.setTimeBetweenEvictionRunsMillis(30 * 1000L);
        druidDataSource.setMinEvictableIdleTimeMillis(30 * 60 * 1000L);

        // Jedis connection pool for the Redis dimension cache.
        JedisPoolConfig config = new JedisPoolConfig();
        config.setMaxTotal(100);
        config.setMaxIdle(10);
        config.setMaxWaitMillis(10 * 100);
        config.setMinIdle(2);
        config.setTestOnCreate(true);
        config.setTestOnBorrow(true);
        config.setTestOnReturn(false);
        jedisPool = new JedisPool(config, "hadoop162", 6379);
    }

    @Override
    public void close() throws Exception {
        // Fix: release pooled resources on task shutdown (original leaked them).
        if (threadP != null) {
            threadP.shutdown();
        }
        if (druidDataSource != null) {
            druidDataSource.close();
        }
        if (jedisPool != null) {
            jedisPool.close();
        }
        super.close();
    }

    @Override
    public void asyncInvoke(T input, ResultFuture<T> resultFuture) throws Exception {
        threadP.submit(() -> {
            Jedis jedis = null;
            DruidPooledConnection phoeConn = null;
            try {
                phoeConn = druidDataSource.getConnection();
                jedis = jedisPool.getResource();
                // Dimension cache lives in Redis DB 1.
                jedis.select(1);

                String key = getTable() + ":" + getId(input);
                String json = jedis.get(key);

                if (json == null) {
                    // Cache miss: read the row from Phoenix.
                    json = queryDimFromPhoenix(phoeConn, input);
                    // Fix: only cache when the row exists; the original passed a
                    // possibly-null value to setex when the query matched no row.
                    if (json != null) {
                        jedis.setex(key, DIM_CACHE_TTL_SECONDS, json);
                    }
                }

                // Fix: enrich and complete on BOTH the hit and miss paths. The
                // original only completed inside the cache-miss branch, so every
                // cached lookup left the future pending until the async timeout.
                if (json != null) {
                    addDim(JSON.parseObject(json), input);
                }
                resultFuture.complete(Collections.singletonList(input));
            } catch (Exception e) {
                // Fix: surface the failure to Flink instead of swallowing it
                // (printStackTrace left the future hanging until timeout).
                resultFuture.completeExceptionally(e);
            } finally {
                // Fix: null-safe release — either acquisition may have thrown.
                if (phoeConn != null) {
                    try {
                        phoeConn.close();
                    } catch (SQLException ignored) {
                        // Connection is being discarded; nothing useful to do.
                    }
                }
                if (jedis != null) {
                    jedis.close();
                }
            }
        });
    }

    /**
     * Queries the dimension row for {@code input} from Phoenix.
     *
     * @return the row serialized as JSON, or {@code null} when no row matches
     * @throws SQLException on any JDBC failure
     */
    private String queryDimFromPhoenix(DruidPooledConnection phoeConn, T input) throws SQLException {
        // Table name comes from the trusted subclass, not external input; the
        // row id is bound as a parameter to avoid SQL injection.
        String sql = "select * from " + getTable() + " where id=?";
        String json = null;
        // Fix: try-with-resources — the original leaked the statement on the
        // exception path and never closed the ResultSet.
        try (PreparedStatement ps = phoeConn.prepareStatement(sql)) {
            ps.setString(1, getId(input));
            try (ResultSet resultSet = ps.executeQuery()) {
                ResultSetMetaData metaData = resultSet.getMetaData();
                int columnCount = metaData.getColumnCount();
                while (resultSet.next()) {
                    // Fix: plain construction and Map.put instead of the
                    // deprecated Class.newInstance + BeanUtils reflection.
                    JSONObject row = new JSONObject();
                    for (int i = 1; i <= columnCount; i++) {
                        row.put(metaData.getColumnLabel(i), resultSet.getObject(i));
                    }
                    json = row.toJSONString();
                }
            }
        }
        return json;
    }

    /** @return the Phoenix dimension table to query. */
    public abstract String getTable();

    /** @return the dimension row id extracted from the input element. */
    public abstract String getId(T input);

    /** Merges the fetched dimension row {@code dim} into {@code input}. */
    public abstract void addDim(JSONObject dim, T input);
}
