package com.cookie.utils;

import com.alibaba.fastjson.JSONObject;
import com.cookie.config.MysqlConfig;
import com.cookie.config.RedisConfig;

import javax.sql.DataSource;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Full-sync (ETL) utility: bulk-copies every row of a MySQL table into Redis,
 * one Redis string per row keyed as {@code <table>:<first-column value>}.
 * Tables with 10k+ rows are imported in parallel, one LIMIT/OFFSET page per task.
 */
public class EtlUtil {
    private String table;

    private MysqlConfig mysqlConf;
    private MysqlUtil mysql;

    private RedisConfig redisConf;
    // All Redis access goes through this helper.
    private RedisUtil redis;

    // Whether Redis runs in cluster mode; controls where resources are released (see etl()).
    private boolean isCluster;

    // Page size per worker task, and the threshold above which the parallel path is used.
    private final long    CNT_PER_TASK = 10000L;

    /**
     * @param table     name of the MySQL table to sync. NOTE(review): this value is
     *                  concatenated directly into SQL text (identifiers cannot be bound
     *                  as JDBC parameters) — it must be validated/whitelisted by the
     *                  caller to prevent SQL injection.
     * @param mysqlConf MySQL connection settings
     * @param redisConf Redis connection settings (standalone or cluster)
     */
    public EtlUtil(String table, MysqlConfig mysqlConf, RedisConfig redisConf){
        this.table = table;
        this.mysqlConf = mysqlConf;
        this.redisConf = redisConf;
        isCluster = redisConf.isCluster();

        mysql = new MysqlUtil(mysqlConf);
        redis = new RedisUtil(redisConf);
    }

    /**
     * Runs the full import. Counts the rows first; when the table holds at least
     * {@code CNT_PER_TASK} rows the work is split into LIMIT/OFFSET pages and executed
     * on a bounded thread pool, otherwise a single pass is made on the calling thread.
     * Exceptions are logged, not rethrown.
     */
    public void etl(){
        AtomicLong impCount = new AtomicLong();
        long start = System.currentTimeMillis();
        try {
            // Trailing semicolon intentionally omitted so the statement can be wrapped below.
            String sql = "select * from " + table;
            System.out.println("etl sql: "+sql);
            // Total row count, used to pick the single-threaded vs. parallel path.
            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
            long cnt = (Long) MysqlUtil.sqlRS(mysql.dataSource, countSql, rs -> {
                Long count = null;
                try {
                    if (rs.next()) {
                        count = ((Number) rs.getObject(1)).longValue();
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return count == null ? 0L : count;
            });

            if (cnt >= CNT_PER_TASK) {
                int threadCount = Runtime.getRuntime().availableProcessors();

                long size = CNT_PER_TASK;
                // Number of pages, rounding up for the final partial page.
                long workerCnt = cnt / size + (cnt % size == 0 ? 0 : 1);
                System.out.println("work count: "+workerCnt+", count: "+cnt+", thread count: "+threadCount);
                // SynchronousQueue plus a blocking rejection handler: submit() blocks until
                // a worker is free, which gives natural back-pressure instead of queueing
                // every page up front.
                ExecutorService executor = new ThreadPoolExecutor(threadCount, threadCount, 5000L,
                        TimeUnit.MILLISECONDS, new SynchronousQueue<>(), (r, exe) -> {
                    if (!exe.isShutdown()) {
                        try {
                            exe.getQueue().put(r);
                        } catch (InterruptedException e) {
                            // FIX: restore the interrupt flag instead of swallowing it.
                            Thread.currentThread().interrupt();
                        }
                    }
                });
                try {
                    List<Future<Boolean>> futures = new ArrayList<>();
                    for (long i = 0; i < workerCnt; i++) {
                        long offset = size * i;
                        String sqlFinal = sql + " LIMIT " + offset + "," + size;
                        System.out.println(sqlFinal);
                        futures.add(executor.submit(() ->
                                executeSqlImport(mysql.dataSource, sqlFinal, impCount)));
                    }
                    // Wait for every page; get() surfaces worker exceptions here.
                    for (Future<Boolean> future : futures) {
                        future.get();
                    }
                } finally {
                    // FIX: shut down even when future.get() throws — previously the pool
                    // leaked its threads on any worker failure.
                    executor.shutdown();
                }
            } else {
                executeSqlImport(mysql.dataSource, sql, impCount);
            }
            System.out.printf("Full import of %s finished: %d rows in %d ms.%n",
                    table, impCount.get(), System.currentTimeMillis() - start);
        }catch (Exception e){
            e.printStackTrace();
        }finally {
            // Cluster mode: release the cluster client exactly once, after all workers finish.
            // JedisCluster maintains an internal pool and returns connections automatically
            // per call; closing it per task would kill the whole cluster connection and any
            // later call would fail with JedisNoReachableClusterNodeException.
            if(isCluster){
                redis.closeResource();
            }
        }
    }

    /**
     * Streams the rows of {@code sql} and writes each row to Redis as a JSON string
     * keyed by {@code table + ":" + <column 1 value>} (column 1 is presumably the
     * primary key — TODO confirm against the schema).
     *
     * @param ds       connection source for the query
     * @param sql      full SELECT to execute (already paged by the caller if needed)
     * @param impCount shared counter, incremented once per row written
     * @return true when the whole result set was written, false on any failure
     */
    protected boolean executeSqlImport(DataSource ds, String sql, AtomicLong impCount){
        try{
            MysqlUtil.sqlRS(ds,sql,rs->{
                try{
                    ResultSetMetaData metaData = rs.getMetaData();
                    int columnCount = metaData.getColumnCount();
                    while (rs.next()){
                        // FIX: build a fresh JSONObject per row. The original reused one
                        // instance across the whole result set, risking stale carry-over.
                        JSONObject json = new JSONObject();
                        for (int i = 1; i <= columnCount; i++) {
                            // Null values are stored as null and omitted by toJSONString().
                            json.put(metaData.getColumnName(i), rs.getString(i));
                        }
                        redis.stringSet(table+":"+ rs.getString(1),json.toJSONString());
                        // FIX: count the row — impCount was never incremented before,
                        // so the import total always reported 0.
                        impCount.incrementAndGet();
                    }
                }catch (Exception e){
                    System.out.println("etl failed");
                    throw new RuntimeException(e);
                }finally {
                    // Standalone mode: return the borrowed connection after each batch.
                    // (Cluster mode releases once in etl() — see the note there.)
                    if(!isCluster){
                        redis.closeResource();
                    }
                }

                return 0;
            });
            System.out.println("sync success.");
            return true;
        }catch (Exception e){
            e.printStackTrace();
            return false;
        }
    }
}
