package com.mao.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.mao.pojo.Root;
import com.mao.pojo.Target;

import com.mao.utils.ExcelUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.io.TextInputFormat;


import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;


import org.apache.flink.util.Collector;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import static com.mao.utils.ExcelUtils.writeIntoExcel;

/**
 * Processes JSON files acquired from HDFS with Flink and writes the extracted
 * repository records to MySQL (via the JDBC sink) or to a local Excel file.
 *
 * @author Wenjie Mao
 * @since 2022-11-22
 */
public class ManageJsonFromHdfs extends Root {
    // NOTE(review): extending the data POJO Root looks accidental — no inherited
    // state is used anywhere in this class; confirm before removing "extends Root".

    /** Insert statement shared by both MySQL writers; columns map 1:1 onto {@link Target}. */
    private static final String INSERT_SQL =
            "insert into target (id,language,full_name,forks_count,stargazers_count,"
                    + "watchers_count,open_issues_count,has_issues,has_wiki,create_at) "
                    + "values(?,?,?,?,?,?,?,?,?,?);";

    public static void main(String[] args) throws Exception {
        // Alternative target: writeFromHdfsToXlsxLocal("hdfs://hadoop102:8020/allJson/", "D:\\CloudComputing\\test.xlsx");
        writeFromHdfsToMysql("hdfs://192.168.10.102:8020/allJson/");
    }

    /**
     * Reads every JSON file under {@code HdfsURL}, converts each record directly
     * through fastjson into a {@link Target} and inserts the rows into MySQL.
     *
     * <p>Bug fixes against the previous revision:
     * <ul>
     *   <li>the raw line was wrapped in {@code {"list":...}} twice (once per map
     *       stage), so {@code getJSONArray("list")} never saw an array;</li>
     *   <li>{@code Types.POJO(String.class)} / {@code Types.POJO(JSONArray.class)}
     *       were used although neither type is a valid Flink POJO;</li>
     *   <li>a map stage formatted the current date and discarded the result
     *       (dead code) while its operator was never attached to a sink.</li>
     * </ul>
     *
     * @param HdfsURL HDFS folder containing the JSON files
     * @throws Exception if the Flink job fails to execute
     */
    public static void writeFromHdfsToMySql(String HdfsURL) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Each line of the input files holds a bare JSON array; read as text.
        TextInputFormat inputFormat = new TextInputFormat(new Path(HdfsURL));
        DataStreamSource<String> stream = env.createInput(inputFormat);

        // Wrap the raw array exactly once as {"list":[...]} so fastjson can parse
        // it as an object, then pull the array back out.
        SingleOutputStreamOperator<JSONArray> jsonArrays = stream
                .map(new MapFunction<String, JSONArray>() {
                    @Override
                    public JSONArray map(String s) throws Exception {
                        JSONObject json = JSON.parseObject("{\"list\":" + s + "}");
                        return json.getJSONArray("list");
                    }
                })
                .returns(Types.GENERIC(JSONArray.class))
                .setParallelism(1);

        // Flatten every array element into one Target record.
        SingleOutputStreamOperator<Target> targets = jsonArrays
                .flatMap(new FlatMapFunction<JSONArray, Target>() {
                    @Override
                    public void flatMap(JSONArray value, Collector<Target> out) throws Exception {
                        // SimpleDateFormat is not thread-safe, so build one per call.
                        SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
                        for (int i = 0; i < value.size(); i++) {
                            Root root = JSON.toJavaObject(value.getJSONObject(i), Root.class);
                            out.collect(toTarget(root, monthFormat));
                        }
                    }
                })
                .returns(Types.POJO(Target.class))
                .setParallelism(1);

        addMysqlSink(targets);
        targets.print();

        env.execute();
    }

    /**
     * Reads every JSON file under {@code HdfsURL} and appends each record to a
     * local xlsx workbook at {@code destUrl}.
     *
     * @param HdfsURL HDFS folder containing the JSON files
     * @param destUrl local path of the xlsx file to write
     * @throws Exception if the Flink job fails to execute
     */
    public static void writeFromHdfsToXlsxLocal(String HdfsURL, String destUrl) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read all files under the folder as plain text lines.
        DataStreamSource<String> stream = env.createInput(new TextInputFormat(new Path(HdfsURL)));

        // Wrap each raw array as {"list":[...]} and let the shared helper turn it
        // into a list of Target beans.
        SingleOutputStreamOperator<List<Target>> targetLists = stream
                .map((MapFunction<String, List<Target>>) s ->
                        ManageJsonFromLocal.getJsonAsTargetList("{\"list\":" + s + "}"))
                .returns(Types.LIST(Types.POJO(Target.class)))
                .setParallelism(1);

        // Explode the lists into a stream of single Target records.
        SingleOutputStreamOperator<Target> targets = targetLists
                .flatMap((List<Target> targetList, Collector<Target> out) -> {
                    for (Target target : targetList) {
                        out.collect(target);
                    }
                })
                .returns(Types.POJO(Target.class));

        // NOTE(review): this writes the workbook once per record; if writeIntoExcel
        // reopens the file each call this is O(n) file rewrites — confirm in ExcelUtils.
        SingleOutputStreamOperator<String> names = targets.map(new MapFunction<Target, String>() {
            @Override
            public String map(Target target) {
                writeIntoExcel(destUrl, target);
                return target.getFull_name();
            }
        });

        names.print();

        env.execute();
    }

    /**
     * Reads every JSON file under {@code HdfsURL} and inserts the records into the
     * MySQL table {@code target}. This is the entry point used by {@link #main}.
     *
     * <p>The previous revision contained a map stage that formatted the current
     * date and discarded the result; that dead code has been removed, together
     * with the duplicated inline sink definition (now shared via
     * {@link #addMysqlSink}).
     *
     * @param HdfsURL HDFS folder containing the JSON files
     * @throws Exception if the Flink job fails to execute
     */
    public static void writeFromHdfsToMysql(String HdfsURL) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read all files under the folder as plain text lines.
        DataStreamSource<String> stream = env.createInput(new TextInputFormat(new Path(HdfsURL)));

        // Wrap each raw array as {"list":[...]} and parse it into Target beans.
        SingleOutputStreamOperator<List<Target>> targetLists = stream
                .map((MapFunction<String, List<Target>>) s ->
                        ManageJsonFromLocal.getJsonAsTargetList("{\"list\":" + s + "}"))
                .returns(Types.LIST(Types.POJO(Target.class)))
                .setParallelism(1);

        // Explode the lists into a stream of single Target records.
        SingleOutputStreamOperator<Target> targets = targetLists
                .flatMap((List<Target> targetList, Collector<Target> out) -> {
                    for (Target target : targetList) {
                        out.collect(target);
                    }
                })
                .returns(Types.POJO(Target.class));

        addMysqlSink(targets);
        targets.print();

        env.execute();
    }

    /** Copies the downstream-relevant fields of a {@link Root} record into a slim {@link Target}. */
    private static Target toTarget(Root root, SimpleDateFormat monthFormat) {
        Target target = new Target();
        target.setId(root.getId());
        target.setLanguage(root.getLanguage());
        target.setFull_name(root.getFull_name());
        target.setForks_count(root.getForks_count());
        target.setStargazers_count(root.getStargazers_count());
        target.setWatchers_count(root.getWatchers_count());
        target.setOpen_issues_count(root.getOpen_issues_count());
        target.setHas_issues(root.isHas_issues());
        target.setHas_wiki(root.isHas_wiki());
        // Only the year-month part of the creation date is persisted.
        target.setCreate_at(monthFormat.format(root.getCreated_at()));
        return target;
    }

    /**
     * Attaches the shared MySQL JDBC sink to {@code targets}; previously this
     * sink was duplicated verbatim in both MySQL writer methods.
     *
     * <p>SECURITY NOTE(review): the connection URL and credentials are hard-coded;
     * move them into external configuration before deploying.
     */
    private static void addMysqlSink(SingleOutputStreamOperator<Target> targets) {
        targets.addSink(JdbcSink.sink(
                INSERT_SQL,
                new JdbcStatementBuilder<Target>() {
                    // Binds Target fields onto the statement placeholders in column order.
                    @Override
                    public void accept(PreparedStatement ps, Target target) throws SQLException {
                        ps.setInt(1, target.getId());
                        ps.setString(2, target.getLanguage());
                        ps.setString(3, target.getFull_name());
                        ps.setInt(4, target.getForks_count());
                        ps.setInt(5, target.getStargazers_count());
                        ps.setInt(6, target.getWatchers_count());
                        ps.setInt(7, target.getOpen_issues_count());
                        ps.setString(8, String.valueOf(target.getHas_issues()));
                        ps.setString(9, String.valueOf(target.getHas_wiki()));
                        ps.setString(10, target.getCreate_at());
                    }
                },
                // Batch/retry tuning kept as-is; the original author marked these
                // values as deliberately chosen.
                JdbcExecutionOptions.builder()
                        .withBatchSize(100)
                        .withBatchIntervalMs(200)
                        .withMaxRetries(5)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withDriverName("com.mysql.jdbc.Driver")
                        .withUrl("jdbc:mysql://localhost:3306/gitee?useSSL=false&useUnicode=true&characterEncoding=utf8")
                        .withUsername("root")
                        .withPassword("123456")
                        .build()));
    }
}
