package com.zhang.sparktool;

import com.zhang.sparktool.config.EnvConfig;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.*;

public class DatasetsCompareTool {
    private static Broadcast<List<Row>> broadcast;

    private static final String url = EnvConfig.config().getString("mysql.local.url");

    public static void main(String[] args) {

        Logger.getLogger("org.apache.spark").setLevel(Level.ERROR);
        SparkSession spark = SparkSession.builder().appName("DatasetsCompareTool")
                .master("local[*]")
                .getOrCreate();
        JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

        SQLContext sqlContext = new SQLContext(sc);

        /**
         List<String> dataA = Arrays.asList("张三A", "李四A");
         JavaRDD<String> rddA = sc.parallelize(dataA);

         List<String> dataB = Arrays.asList("张三B", "李四B", "王五B", "赵六B", "黎明B");
         JavaRDD<String> rddB = sc.parallelize(dataB);
         */
        Dataset<Row> datasetA = spark.read().option("header", true).csv("D:\\develop-workspace\\spark-datases-compare\\src\\main\\data\\userA.csv");
        JavaRDD<Row> rddA = datasetA.javaRDD();

        Dataset<Row> datasetB = spark.read().option("header", true).csv("D:\\develop-workspace\\spark-datases-compare\\src\\main\\data\\userB.csv");
        JavaRDD<Row> rddB = datasetB.javaRDD();

        JavaRDD<String> totalResult = null;
        int count = 0;
        List<Row> tempArticleList = new ArrayList<>();

        for (Row article : rddB.collect()) { //BCollect：B数据集collect结果
            tempArticleList.add(article);
            count++;

            if (tempArticleList.size() >= 100) {
                broadcast = sc.broadcast(tempArticleList);
                System.out.println("size of broadcast:" + tempArticleList.size());
                // baseRddx--A数据集
                JavaRDD<String> resultRDDx = rddA.flatMap(new CompareArticle());
                if (totalResult == null) {
                    totalResult = resultRDDx;
                } else {
                    totalResult = totalResult.union(resultRDDx);
                }
                count = 0;
                tempArticleList = new ArrayList<>();

            }
        }


        if (tempArticleList.size() > 0) {
            broadcast = sc.broadcast(tempArticleList);
            System.out.println("size of broadcast:" + tempArticleList.size());
            JavaRDD<String> resultRDDx = rddA.flatMap(new CompareArticle());
            if (totalResult == null) {
                totalResult = resultRDDx;
            } else {
                totalResult = totalResult.union(resultRDDx);
            }
        }

        assert totalResult != null;
        for (String s : totalResult.collect()) {
            System.out.println(s);
        }

        readMySQL(sqlContext);

        insertDataIntoMySQL(sc, sqlContext);

        spark.close();
    }

    private static void readMySQL(SQLContext sqlContext) {

        //查找的表名
        String table = "merge_config";

        //SparkJdbc读取Postgresql的products表内容
        System.out.println("读取test数据库中的user_test表内容");
        // 读取表中所有数据
        Dataset<Row> dataset = sqlContext.read().jdbc(url, table, getProperties()).select("*");
        //显示数据
        dataset.show();
    }

    private static void insertDataIntoMySQL(JavaSparkContext sparkContext, SQLContext sqlContext) {
        //写入的数据内容
        JavaRDD<String> personData = sparkContext.parallelize(Arrays.asList("1 tom 5", "2 jack 6", "3 alex 7"));
        //数据库内容
        String url = "jdbc:mysql://localhost:3306/test";
        /**
         * 第一步：在RDD的基础上创建类型为Row的RDD
         */
        //将RDD变成以Row为类型的RDD。Row可以简单理解为Table的一行数据
        JavaRDD<Row> personsRDD = personData.map(row -> {
            String[] split = row.split(" ");
            return RowFactory.create(Integer.valueOf(split[0]), split[1], Integer.valueOf(split[2]));
        });

        /**
         * 第二步：动态构造DataFrame的元数据。
         */
        List structFields = new ArrayList();
        structFields.add(DataTypes.createStructField("id", DataTypes.IntegerType, true));
        structFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        structFields.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));

        //构建StructType，用于最后DataFrame元数据的描述
        StructType structType = DataTypes.createStructType(structFields);

        /**
         * 第三步：基于已有的元数据以及RDD<Row>来构造DataFrame
         */
        Dataset<Row> dataset = sqlContext.createDataFrame(personsRDD, structType);

        /**
         * 第四步：将数据写入到person表中
         */
        dataset.write().mode("append").jdbc(url, "person", getProperties());
    }

    private static Properties getProperties() {
        String user = EnvConfig.config().getString("mysql.local.user");
        String password = EnvConfig.config().getString("mysql.local.password");
        String driver = EnvConfig.config().getString("mysql.driver");
        Properties connectionProperties = new Properties();
        connectionProperties.put("user", user);
        connectionProperties.put("password", password);
        connectionProperties.put("driver", driver);

        return connectionProperties;
    }


    private static class CompareArticle implements FlatMapFunction<Row, String> {

        @Override
        public Iterator<String> call(Row s) {
            List<String> resultList = new ArrayList<>();
            List<Row> articleList = broadcast.getValue();
            for (Row s1 : articleList) {
                if (s1.get(0).equals(s.get(0))) {
                    resultList.add(s1.mkString(",") + "--" + s.mkString(","));
                }
            }
            System.out.println(s.mkString(","));
            return resultList.iterator();
        }
    }
}


