/*
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.expressions.GenericRow;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import model.CountInfo;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
import utils.CountInfoDao;

import java.util.Arrays;
import java.util.Iterator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.apache.spark.sql.types.DataTypes.IntegerType;
import static org.apache.spark.sql.types.DataTypes.StringType;

// Example job: word-count a text file with the RDD API, then append the
// (word, count) results to a MySQL table through the Spark JDBC data source.
public class WriteTest {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local");
        conf.setAppName("spark_wordcount_java");

        JavaSparkContext sc = new JavaSparkContext(conf);
        // createDataFrame requires a SparkSession; build one on top of the
        // existing SparkContext instead of creating a second context.
        SparkSession spark = SparkSession.builder()
                .sparkContext(sc.sc())
                .getOrCreate();

        // Classic word count: split each line on spaces, emit (word, 1), sum per key.
        JavaRDD<String> lines = sc.textFile("src/main/resources/data");
        JavaRDD<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
        JavaPairRDD<String, Integer> pairRDD = words.mapToPair(word -> new Tuple2<>(word, 1));
        JavaPairRDD<String, Integer> result = pairRDD.reduceByKey((v1, v2) -> v1 + v2);

        // Schema matching the (word, count) pairs produced above.
        // Metadata.empty() rather than null: Catalyst dereferences the metadata
        // field, and a null here fails at DataFrame creation time.
        StructField[] fields = {
                new StructField("word", StringType, true, Metadata.empty()),
                new StructField("count", IntegerType, true, Metadata.empty())
        };
        StructType structType = new StructType(fields);

        // Materialize the counts on the driver as Rows so they can back a DataFrame.
        List<Row> rows = new ArrayList<>();
        for (Tuple2<String, Integer> tuple : result.collect()) {
            rows.add(new GenericRow(new Object[]{tuple._1, tuple._2}));
        }

        Dataset<Row> countDataSet = spark.createDataFrame(rows, structType);

        Map<String, String> options = new HashMap<>();
        options.put("driver", "com.mysql.jdbc.Driver");
        options.put("url", "jdbc:mysql://host:port");
        options.put("user", "****");
        options.put("password", "****");
        options.put("dbtable", "****");
        // format("jdbc") is required: a bare save() has no data source to
        // route the url/dbtable options to and throws at runtime.
        countDataSet.write().mode(SaveMode.Append).options(options).format("jdbc").save();

        spark.stop();
        sc.close();
    }
}
*/
