package com;


import com.bean.Person;
import com.utils.JdbcUtils;

import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.BucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.util.Preconditions;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Streams rows from a MySQL table into HDFS (a Hive warehouse directory) using
 * Flink's {@code StreamingFileSink} with date-based (dt=yyyyMMdd) bucketing.
 *
 * @author xiao kun tai (QQ 1667847363)
 * @since 2022/2/8 11:10
 */
public class MysqlToHDFS {
    /**
     * Job entry point: reads every row of {@code test.mysql_hive} from MySQL,
     * streams it to HDFS as row-encoded text files, then prints the wall-clock
     * time the (bounded) job took.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Checkpointing is required by StreamingFileSink: in-progress part files
        // are only finalized (moved out of ".inprogress") on checkpoint completion.
        env.enableCheckpointing(1000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Keep externalized checkpoint data on cancel so the job can be restored manually.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setFailOnCheckpointingErrors(true);

        // Primitive long: avoids pointless boxing of the original Long timestamps.
        long startTime = System.currentTimeMillis();

        DataStream<Person> streamSource = env.addSource(new MyMysqlSource("select * from test.mysql_hive"));

        // Identity used by the HDFS client when writing the output files.
        System.setProperty("HADOOP_USER_NAME", "root");

        String outputBasePath = "hdfs://docker:9000/user/hive2/warehouse/test.db/mysql_hive";

        StreamingFileSink<Person> sink = StreamingFileSink
                .forRowFormat(new Path(outputBasePath), new SimpleStringEncoder<Person>("UTF-8"))
                // Roll part files every 30 min, after 5 min of inactivity, or at 128 MiB.
                .withRollingPolicy(
                        DefaultRollingPolicy.builder()
                                .withRolloverInterval(TimeUnit.MINUTES.toMillis(30))
                                .withInactivityInterval(TimeUnit.MINUTES.toMillis(5))
                                .withMaxPartSize(1024 * 1024 * 128)
                                .build())
                .withBucketAssigner(new CustomBucketAssigner("yyyyMMdd", ZoneId.of("Asia/Shanghai"), "dt="))
                .withBucketCheckInterval(1)
                .withOutputFileConfig(
                        OutputFileConfig.builder()
                                .withPartPrefix("part")
                                // FIX: the original called withPartSuffix twice ("--", then ".ext");
                                // only the last call takes effect, so the first was dead code.
                                .withPartSuffix(".ext")
                                .build())
                .build();

        streamSource.addSink(sink);

        // Blocks until the (bounded) job finishes, so the timing below is valid.
        env.execute();

        System.out.println("花费时间：" + formatDuration(System.currentTimeMillis() - startTime));
    }

    /**
     * Formats a millisecond duration as "{d}d{h}h{m}m{s}s{ms}ms", omitting the
     * leading units that are zero (e.g. 61001 ms -> "1m1s1ms"). Behavior matches
     * the original inline expression: once a larger unit is printed, every
     * smaller unit is printed too, and milliseconds are always printed.
     *
     * @param millis elapsed time in milliseconds (non-negative)
     * @return human-readable duration string
     */
    private static String formatDuration(long millis) {
        long days = millis / 86400000;
        long hours = millis % 86400000 / 3600000;
        long minutes = millis % 3600000 / 60000;
        long seconds = millis % 60000 / 1000;
        StringBuilder sb = new StringBuilder();
        if (days > 0) {
            sb.append(days).append("d");
        }
        if (sb.length() > 0 || hours > 0) {
            sb.append(hours).append("h");
        }
        if (sb.length() > 0 || minutes > 0) {
            sb.append(minutes).append("m");
        }
        if (sb.length() > 0 || seconds > 0) {
            sb.append(seconds).append("s");
        }
        return sb.append(millis % 1000).append("ms").toString();
    }

    /**
     * Bounded Flink source that runs one SQL query against MySQL and emits each
     * resulting row as a {@link Person}. Expected table schema:
     * <pre>
     * CREATE TABLE `mysql_hive`  (
     *   `id` int(11) NULL DEFAULT NULL,
     *   `name` varchar(255),
     *   `age` int(11) NULL DEFAULT NULL,
     *   `money` double NULL DEFAULT NULL,
     *   `todate` date NULL DEFAULT NULL,
     *   `ts` timestamp NULL DEFAULT NULL
     * );
     * </pre>
     */
    public static class MyMysqlSource extends RichSourceFunction<Person> {
        // The SELECT statement to execute; fixed for the lifetime of the source.
        private final String sql;
        private Connection conn = null;
        // Cooperative cancellation flag, per the SourceFunction contract.
        // volatile: cancel() is invoked from a different thread than run().
        private volatile boolean running = true;

        public MyMysqlSource(String sql) {
            this.sql = sql;
        }

        /**
         * Opens the JDBC connection once per task before {@link #run} is called.
         * NOTE(review): credentials and URL are hard-coded; move them to job
         * configuration or a secret store before production use.
         */
        @Override
        public void open(Configuration parameters) throws Exception {
            String driver = "com.mysql.jdbc.Driver";
            String url = "jdbc:mysql://localhost:3306/test?useSSL=true";
            String username = "root";
            String password = "A";
            Class.forName(driver);
            conn = DriverManager.getConnection(url, username, password);
        }

        /**
         * Executes the query and emits every row, stopping early if the job is
         * cancelled. The source finishes (job becomes bounded) once the list is
         * exhausted.
         */
        @Override
        public void run(SourceContext<Person> sourceContext) throws Exception {
            List<Person> personList = JdbcUtils.queryList(conn, sql, Person.class);
            for (Person person : personList) {
                if (!running) {
                    break;
                }
                sourceContext.collect(person);
            }
        }

        /**
         * FIX: was an empty no-op, so cancellation could never interrupt emission.
         * Now flips the running flag checked by {@link #run}.
         */
        @Override
        public void cancel() {
            running = false;
        }

        /** Closes the JDBC connection; swallows close failures after logging them. */
        @Override
        public void close() throws Exception {
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }
        }
    }


    /**
     * Bucket assigner that partitions output by processing-time date, producing
     * Hive-style bucket directories such as {@code dt=20220208} (when
     * {@code column} is {@code "dt="} and {@code formatString} is
     * {@code "yyyyMMdd"}).
     */
    public static class CustomBucketAssigner implements BucketAssigner<Person, String> {

        private static final long serialVersionUID = 1L;

        private static final String DEFAULT_FORMAT_STRING = "yyyy-MM-dd";

        // Date pattern used for the bucket directory name.
        private final String formatString;

        // Time zone the processing-time timestamp is rendered in.
        private final ZoneId zoneId;

        // Prefix prepended to the formatted date, e.g. "dt=" for Hive partitions.
        private final String column;

        // Lazily built in getBucketId(): DateTimeFormatter is not Serializable,
        // so it cannot be created eagerly in the constructor.
        private transient DateTimeFormatter dateTimeFormatter;

        /** Uses "yyyy-MM-dd", the system default zone, and no column prefix. */
        public CustomBucketAssigner() {
            this(DEFAULT_FORMAT_STRING);
        }

        /** Uses the given pattern with the system default zone and no prefix. */
        public CustomBucketAssigner(String formatString) {
            this(formatString, ZoneId.systemDefault(), "");
        }

        /** Uses "yyyy-MM-dd" in the given zone with no prefix. */
        public CustomBucketAssigner(ZoneId zoneId) {
            this(DEFAULT_FORMAT_STRING, zoneId, "");
        }

        /** Uses the given pattern and prefix with the system default zone. */
        public CustomBucketAssigner(String formatString, String column) {
            this(formatString, ZoneId.systemDefault(), column);
        }

        /** Uses the given pattern and zone with no prefix. */
        public CustomBucketAssigner(String formatString, ZoneId zoneId) {
            // FIX: previously forwarded ZoneId.systemDefault(), silently ignoring
            // the zoneId argument the caller supplied.
            this(formatString, zoneId, "");
        }

        public CustomBucketAssigner(String formatString, ZoneId zoneId, String column) {
            this.formatString = Preconditions.checkNotNull(formatString);
            this.zoneId = Preconditions.checkNotNull(zoneId);
            this.column = Preconditions.checkNotNull(column);
        }

        /**
         * Returns the bucket id for this element: the column prefix followed by
         * the current processing time formatted with {@code formatString} in
         * {@code zoneId}.
         */
        @Override
        public String getBucketId(Person element, Context context) {
            if (dateTimeFormatter == null) {
                dateTimeFormatter = DateTimeFormatter.ofPattern(formatString).withZone(zoneId);
            }
            // FIX: previously returned "" (every record went to the base path),
            // leaving formatString/zoneId/column entirely unused. This restores
            // the intended date-partitioned bucket id.
            return column + dateTimeFormatter.format(Instant.ofEpochMilli(context.currentProcessingTime()));
        }

        @Override
        public SimpleVersionedSerializer<String> getSerializer() {
            return SimpleVersionedStringSerializer.INSTANCE;
        }

        @Override
        public String toString() {
            // FIX: previously reported the wrong class name ("DateTimeBucketAssigner")
            // and omitted the column prefix.
            return "CustomBucketAssigner{"
                    + "formatString='"
                    + formatString
                    + '\''
                    + ", zoneId="
                    + zoneId
                    + ", column='"
                    + column
                    + '\''
                    + '}';
        }
    }

}
