package com.yangzb;
import com.clickhouse.client.ClickHouseCompression;

import cn.hutool.core.io.IoUtil;
import com.clickhouse.client.ClickHouseException;
import com.clickhouse.client.ClickHouseFormat;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Map;
import java.util.Set;

@Slf4j
public class Main {

    /**
     * Entry point: streams every file under an HDFS directory into a ClickHouse table
     * via {@code HdfsToClickHouse}.
     *
     * <p>NOTE(review): hosts and credentials (including the ClickHouse password) are
     * hard-coded below and embedded in the URI — move them to external configuration
     * and keep secrets out of source control before shipping.
     */
    public static void main(String[] args) throws Exception {
        HdfsConfig hdfsConfig = new HdfsConfig();
        hdfsConfig.setHdfsPath("hdfs://hdp6.tydic.com:8020/apps/hive/warehouse/test.db/test_5000w_s/shard_id=0");
        hdfsConfig.setRecursive(true);

        ClickHouseConfig clickHouseConfig = new ClickHouseConfig();
        clickHouseConfig.setClickhouseTableName("test.test_5000w");
        clickHouseConfig.setUris("http://192.168.10.17:8124/test?user=mkt&password=1234567890");
        clickHouseConfig.setFormat(ClickHouseFormat.TSV);

        HdfsToClickHouse hdfsToClickHouse = new HdfsToClickHouse(hdfsConfig, clickHouseConfig);
        hdfsToClickHouse.run();
    }

    /**
     * Ad-hoc benchmark helper: recursively lists every file under an HDFS path and
     * copies each one to the local filesystem.
     *
     * <p>NOTE(review): the local destination is {@code new File("")} — as written this
     * throws {@code FileNotFoundException} on the first copy; fill in a real target
     * path (e.g. derived from the HDFS file name) before using this method.
     */
    private static void file() throws Exception {
        String path = "hdfs://hdp6.tydic.com:8020/apps/hive/warehouse/test.db/test_5000w_s/shard_id=0";
        Configuration configuration = new Configuration();
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        // "fs.defaultFS" is the current key; "fs.default.name" is deprecated since Hadoop 2.
        configuration.set("fs.defaultFS", "hdfs://hdp6.tydic.com:8020");
        // try-with-resources guarantees the FileSystem and the per-file streams are
        // closed even when a copy fails part-way; the original leaked both streams
        // on every iteration and only closed the FileSystem on the happy path.
        try (FileSystem fileSystem = FileSystem.get(configuration)) {
            RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(new Path(path), true);
            while (files.hasNext()) {
                LocatedFileStatus status = files.next();
                try (FSDataInputStream in = fileSystem.open(status.getPath());
                     FileOutputStream out = new FileOutputStream(new File(""))) { // TODO: real local path
                    IoUtil.copy(in, out);
                }
            }
        }
    }

    /**
     * Loads a single local file into ClickHouse over HTTP.
     *
     * <p>NOTE(review): the source-file argument is an empty string — supply a real
     * path before invoking. Credentials are hard-coded in the URL (see {@link #main}).
     */
    private static void insertHttp() throws ClickHouseException, IOException {
        ClickHouseUtil.importFile("http://192.168.10.17:8124/test?user=mkt&password=1234567890",
                "", "test.test_5000w", ClickHouseFormat.TSV, null);
    }

    /**
     * Ad-hoc benchmark helper: full-scans a Hive table over JDBC (forward-only,
     * read-only cursor) and prints a progress line every 10,000 rows. Used to
     * compare JDBC read throughput against the direct-HDFS path.
     */
    private static void jdbc() throws Exception {
        // Typed map instead of the original raw Map (unchecked warnings).
        // TODO(review): confirm HiveUtil.init accepts Map<String, String>.
        Map<String, String> hiveConfig = Maps.newHashMap();
        hiveConfig.put("spring.datasource.hive.driverClassName", "org.apache.hive.jdbc.HiveDriver");
        hiveConfig.put("spring.datasource.hive.url", "jdbc:hive2://hdp6.tydic.com:2181,hdp7.tydic.com:2181,hdp8.tydic.com:2181/test;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2");
        hiveConfig.put("spring.datasource.hive.username", "hive");
        hiveConfig.put("spring.datasource.hive.password", "");
        HiveUtil.init(hiveConfig);

        // try-with-resources closes ResultSet/Statement/Connection in reverse order,
        // including on exceptions; the original closed them only on the happy path
        // and leaked all three if the scan failed mid-way.
        try (Connection connection = HiveUtil.getConnection(null);
             PreparedStatement statement = connection.prepareStatement(
                     "select * from test.test_5000w_s where shard_id=0",
                     ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
             ResultSet resultSet = statement.executeQuery()) {
            int i = 0;
            while (resultSet.next()) {
                i++;
                if (i % 10000 == 0) {
                    System.out.println(i + "=" + resultSet.getString("c1"));
                }
            }
        }
    }
}
