import com.alibaba.fastjson.JSONObject;
import com.cookie.config.MysqlConfig;
import com.cookie.config.RedisConfig;
import com.cookie.utils.MysqlUtil;
import com.cookie.utils.RedisUtil;

import javax.sql.DataSource;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

/**
 * This exception occurred seven times:
 * redis.clients.jedis.exceptions.JedisClusterMaxAttemptsException: No more cluster attempts left.
 * 	at redis.clients.jedis.JedisClusterCommand.runWithRetries(JedisClusterCommand.java:86)
 * 	at redis.clients.jedis.JedisClusterCommand.runWithRetries(JedisClusterCommand.java:124)
 * ...
 * 	at redis.clients.jedis.JedisClusterCommand.runWithRetries(JedisClusterCommand.java:124)
 * 	at redis.clients.jedis.JedisClusterCommand.run(JedisClusterCommand.java:25)
 * 	at redis.clients.jedis.JedisCluster.hset(JedisCluster.java:510)
 * 	at com.cookie.utils.RedisClusterUtil.hashSet(RedisClusterUtil.java:115)
 * 	at com.cookie.utils.RedisUtil.hashSet(RedisUtil.java:109)
 * 	at com.cookie.utils.EtlUtil.lambda$executeSqlImport$3(EtlUtil.java:141)
 * 	at com.cookie.utils.MysqlUtil.sqlRS(MysqlUtil.java:49)
 * 	at com.cookie.utils.EtlUtil.executeSqlImport(EtlUtil.java:127)
 * 	at com.cookie.utils.EtlUtil.lambda$etl$2(EtlUtil.java:95)
 * 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
 * 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
 * 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
 * 	at java.lang.Thread.run(Thread.java:748)
 * 	This exception occurred four times:
 * redis.clients.jedis.exceptions.JedisDataException: LOADING Redis is loading the dataset in memory
 * 	at redis.clients.jedis.Protocol.processError(Protocol.java:132)
 * 	at redis.clients.jedis.Protocol.process(Protocol.java:166)
 * 	at redis.clients.jedis.Protocol.read(Protocol.java:220)
 * 	at redis.clients.jedis.Connection.readProtocolWithCheckingBroken(Connection.java:278)
 * 	at redis.clients.jedis.Connection.getUnflushedObjectMultiBulkReply(Connection.java:240)
 * 	at redis.clients.jedis.Connection.getObjectMultiBulkReply(Connection.java:245)
 * 	at redis.clients.jedis.Jedis.clusterSlots(Jedis.java:3486)
 * 	at redis.clients.jedis.JedisClusterInfoCache.discoverClusterSlots(JedisClusterInfoCache.java:161)
 * 	at redis.clients.jedis.JedisClusterInfoCache.renewClusterSlots(JedisClusterInfoCache.java:140)
 * 	at redis.clients.jedis.JedisClusterConnectionHandler.renewSlotCache(JedisClusterConnectionHandler.java:88)
 * 	at redis.clients.jedis.JedisClusterCommand.runWithRetries(JedisClusterCommand.java:121)
 * 	...
 * 	at redis.clients.jedis.JedisClusterCommand.runWithRetries(JedisClusterCommand.java:124)
 * 	at redis.clients.jedis.JedisClusterCommand.run(JedisClusterCommand.java:25)
 * 	at redis.clients.jedis.JedisCluster.hset(JedisCluster.java:510)
 * 	at com.cookie.utils.RedisClusterUtil.hashSet(RedisClusterUtil.java:115)
 * 	at com.cookie.utils.RedisUtil.hashSet(RedisUtil.java:109)
 * 	at com.cookie.utils.EtlUtil.lambda$executeSqlImport$3(EtlUtil.java:143)
 * 	at com.cookie.utils.MysqlUtil.sqlRS(MysqlUtil.java:49)
 * 	at com.cookie.utils.EtlUtil.executeSqlImport(EtlUtil.java:127)
 * 	at com.cookie.utils.EtlUtil.lambda$etl$2(EtlUtil.java:95)
 * 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
 * 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
 * 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
 * 	at java.lang.Thread.run(Thread.java:748)
 * This exception occurred many times — probably the cluster connections were never released:
 * redis.clients.jedis.exceptions.JedisClusterException: CLUSTERDOWN The cluster is down
 * 	at redis.clients.jedis.Protocol.processError(Protocol.java:122)
 * 	at redis.clients.jedis.Protocol.process(Protocol.java:166)
 * 	at redis.clients.jedis.Protocol.read(Protocol.java:220)
 * 	at redis.clients.jedis.Connection.readProtocolWithCheckingBroken(Connection.java:278)
 * 	at redis.clients.jedis.Connection.getIntegerReply(Connection.java:220)
 * 	at redis.clients.jedis.Jedis.hset(Jedis.java:737)
 * 	at redis.clients.jedis.JedisCluster$34.execute(JedisCluster.java:508)
 * 	at redis.clients.jedis.JedisCluster$34.execute(JedisCluster.java:505)
 * 	at redis.clients.jedis.JedisClusterCommand.runWithRetries(JedisClusterCommand.java:106)
 * 	at redis.clients.jedis.JedisClusterCommand.run(JedisClusterCommand.java:25)
 * 	at redis.clients.jedis.JedisCluster.hset(JedisCluster.java:510)
 * 	at com.cookie.utils.RedisClusterUtil.hashSet(RedisClusterUtil.java:115)
 * 	at com.cookie.utils.RedisUtil.hashSet(RedisUtil.java:109)
 * 	at com.cookie.utils.EtlUtil.lambda$executeSqlImport$3(EtlUtil.java:143)
 * 	at com.cookie.utils.MysqlUtil.sqlRS(MysqlUtil.java:49)
 * 	at com.cookie.utils.EtlUtil.executeSqlImport(EtlUtil.java:127)
 * 	at com.cookie.utils.EtlUtil.lambda$etl$2(EtlUtil.java:95)
 * 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
 * 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
 * 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
 * 	at java.lang.Thread.run(Thread.java:748)
 */
public class Mysql2RedisNoAuto {
    /**
     * Scratch notes: ~840000 rows total, resume from 780000.
     *
     * Key layouts this tool produces:
     *   hash:jyjobs:*
     *   string:jyjobs:*
     */
    public static String table = "jyjobs";
    // For the hash layout every field is set individually; for the string
    // layout all fields are converted to JSON and written with a single set.
    public static String prefix = "test";  // hash
    // Page size: number of rows each worker task imports per LIMIT page.
    public static final long    CNT_PER_TASK = 10000L;

    public static MysqlUtil mysql;
    public static RedisUtil redis;

    public static void main(String[] args) {

        init();
        // redisTest();
        // mysqlTest();

        String sql = "select * from " + table;
        // long cnt = 7180713;
        long cnt = 115269;

        // etl(sql, cnt);
    }

    /** Smoke test for the Redis connection: delete a scratch key. */
    public static void redisTest(){
        // redis.stringSet("test","aoo");

        redis.delKey("test");
    }

    /**
     * Smoke test for MySQL: reads the first column name (treated as the
     * primary key) and then its min/max over the whole table.
     * NOTE(review): assumes column 1 of "select *" is the primary key —
     * confirm against the table schema.
     */
    public static void mysqlTest(){
        String sql = "select * from jyjobs";
        // Fetch the name of the first column and use it as the primary key.
        String pk = (String) MysqlUtil.sqlRS(mysql.dataSource, sql, rs -> {
            String primaryKey = null;
            try {
                ResultSetMetaData metaData = rs.getMetaData();
                primaryKey = metaData.getColumnName(1);
            } catch (Exception e) {
                e.printStackTrace();
            }
            return primaryKey == null ? "" : primaryKey;
        });
        System.out.println(pk);
        // Min/max of the primary key (can be slow on tables with millions of rows).
        String countSql = "select min("+pk+"),max("+pk+") from jyjobs";
        AtomicLong max = new AtomicLong();
        AtomicLong min = new AtomicLong();
        MysqlUtil.sqlRS(mysql.dataSource, countSql, rs -> {
            try {
                if (rs.next()) {
                    // Read columns left-to-right: forward-only result sets may
                    // not support reading column 2 before column 1.
                    min.set(((Number) rs.getObject(1)).longValue());
                    max.set(((Number) rs.getObject(2)).longValue());
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return 0;
        });
        System.out.printf("max: %d, min: %d.\n", max.get(),min.get());
    }

    /** Builds the MySQL and Redis utility instances from hard-coded config. */
    public static void init(){
        MysqlConfig mysqlConf = new MysqlConfig();
        RedisConfig redisConf = new RedisConfig();

        mysqlConf.setUrl("jdbc:mysql://10.100.2.242:3306/jyjobs");
        mysqlConf.setUsername("canal");
        mysqlConf.setPassword("canal");

        redisConf.setCluster(true);
        redisConf.setAddrs("10.100.6.114:7000,10.100.6.114:7001,10.100.6.114:7002,10.100.6.115:7000,10.100.6.115:7001,10.100.6.115:7002");
        redisConf.setAuth("123456");
        redisConf.setPrefix(prefix);
        redisConf.setUseLowerCase(true);

        // Create the MySQL utility instance.
        mysql = new MysqlUtil(mysqlConf);
        // Create the Redis utility instance.
        redis = new RedisUtil(redisConf);
    }

    /**
     * Imports {@code cnt} rows selected by {@code sql} into Redis. The work is
     * split into LIMIT-paged tasks of {@link #CNT_PER_TASK} rows each, run on
     * a fixed pool with one thread per CPU core.
     *
     * @param sql base SELECT statement; a "LIMIT offset,size" clause is appended per task
     * @param cnt total number of rows to import
     */
    public static void etl(String sql, long cnt){
        AtomicLong impCount = new AtomicLong();
        ExecutorService executor = null;
        try{
            int threadCount = Runtime.getRuntime().availableProcessors();

            long size = CNT_PER_TASK;
            // Round up so a trailing partial page still gets its own task.
            long workerCnt = cnt / size + (cnt % size == 0 ? 0 : 1);
            System.out.println("work count: "+workerCnt+", count: "+cnt+", thread count: "+threadCount);
            // SynchronousQueue plus a "caller blocks" rejection handler:
            // submit() blocks until a worker is free instead of queueing
            // an unbounded backlog of pages.
            executor = new ThreadPoolExecutor(threadCount, threadCount, 5000L,
                    TimeUnit.MILLISECONDS, new SynchronousQueue<>(), (r, exe) -> {
                if (!exe.isShutdown()) {
                    try {
                        exe.getQueue().put(r);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag rather than swallowing it,
                        // so the submitting thread can observe the interruption.
                        Thread.currentThread().interrupt();
                    }
                }
            });
            List<Future<Boolean>> futures = new ArrayList<>();

            for (long i = 0; i < workerCnt; i++) {
                System.out.println("workcount:"+i);
                long offset = size * i;
                String sqlFinal = sql + " LIMIT " + offset + "," + size;
                System.out.println(sqlFinal);
                Future<Boolean> future = executor.submit(() ->
                        executeSqlImport(mysql.dataSource, sqlFinal, impCount));
                futures.add(future);
            }

            // Wait for every page and count the ones that failed instead of
            // silently discarding the task results.
            long failedTasks = 0;
            for (Future<Boolean> future : futures) {
                if (!Boolean.TRUE.equals(future.get())) {
                    failedTasks++;
                }
            }
            System.out.println("imported rows: " + impCount.get()
                    + ", failed tasks: " + failedTasks);
        }catch (Exception e){
            e.printStackTrace();
        }finally {
            // Always release the pool, even when a task threw — otherwise the
            // non-daemon worker threads keep the JVM alive.
            if (executor != null) {
                executor.shutdown();
            }
        }
    }

    /**
     * Streams the rows of {@code sql} and writes each one to Redis as a JSON
     * string keyed by "table:firstColumnValue".
     *
     * @param ds       connection pool to run the query on
     * @param sql      SELECT statement for one page of rows
     * @param impCount shared counter, incremented once per imported row
     * @return true if the whole page was written, false if anything failed
     */
    public static boolean executeSqlImport(DataSource ds, String sql, AtomicLong impCount){
        try{
            MysqlUtil.sqlRS(ds,sql, rs->{
                try{
                    ResultSetMetaData metaData = rs.getMetaData();
                    int columnCount = metaData.getColumnCount();
                    while (rs.next()){
                        // Fresh object per row so values can never leak from a
                        // previous row into the serialized JSON.
                        JSONObject json = new JSONObject();
                        String rowKey = null;
                        for (int i = 1; i <= columnCount; i++) {
                            String value = rs.getString(i);
                            json.put(metaData.getColumnName(i), value);
                            if (i == 1) {
                                // First column doubles as the Redis key; capture
                                // it here instead of re-reading the column later
                                // (forward-only result sets may forbid re-reads).
                                rowKey = value;
                            }
                        }
                        // String layout: one key per row holding the row as JSON.
                        redis.stringSet(table+":"+ rowKey, json.toJSONString());
                        impCount.incrementAndGet();
                    }
                }catch (Exception e){
                    System.out.println("etl failed");
                    throw new RuntimeException(e);
                }

                return 0;
            });
            System.out.println("sync success.");
            return true;
        }catch (Exception e){
            e.printStackTrace();
            return false;
        }
    }
}
