package com.isunimp.sample.spark;

import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;

import java.util.Arrays;

/**
 * MySQL class
 *
 * @author renguiquan
 * @date 2019/3/13
 */
public class MySQL {
    /** Shared logger; also captured into the executor-side filter lambda below. */
    static final Logger LOGGER = Logger.getLogger(MySQL.class);

    /**
     * JavaBean describing one row of the {@code task} table used by the
     * (disabled) JDBC read example in {@link #main}. The snake_case
     * getter/setter names intentionally mirror the column names so a bean
     * mapping can bind them directly.
     */
    public static class Task {
        private Integer id;      // primary key
        private String task_id;  // business task identifier
        private String state;    // task state column

        public Integer getId() {
            return id;
        }

        public void setId(Integer id) {
            this.id = id;
        }

        public String getTask_id() {
            return task_id;
        }

        public void setTask_id(String task_id) {
            this.task_id = task_id;
        }

        public String getState() {
            return state;
        }

        public void setState(String state) {
            this.state = state;
        }
    }

    /**
     * Entry point: parallelizes {1, 2, 3, 4}, keeps the even values and
     * writes them as text files under the relative path {@code mysql}.
     *
     * @param args command-line arguments; only logged, never parsed
     */
    public static void main(String[] args) {
        // BUG FIX: formatting a String[] directly prints its identity hash
        // ("[Ljava.lang.String;@..."), not the argument values — wrap in
        // Arrays.toString to show the actual contents.
        System.out.println(String.format("参数：%s", Arrays.toString(args)));
        SparkSession session = SparkSession.builder()
                .appName("mysql spark")
//                .master("local")  // uncomment for local (non-cluster) runs
                .getOrCreate();
        JavaSparkContext javaSparkContext = JavaSparkContext.fromSparkContext(session.sparkContext());
        JavaRDD<Integer> rdd = javaSparkContext.parallelize(Arrays.asList(1, 2, 3, 4));
        JavaRDD<Integer> rdd1 = rdd.filter(o -> {
            // Runs on executors; args is serialized along with the lambda.
            // Same array-formatting fix as above.
            LOGGER.info(String.format("参数：%s", Arrays.toString(args)));
            return o % 2 == 0;
        });
        // NOTE(review): saveAsTextFile throws if the "mysql" output directory
        // already exists from a previous run — remove it first or vary the path.
        rdd1.saveAsTextFile("mysql");

        // JDBC read example (disabled). The previously committed host and
        // plaintext password were removed — never hard-code credentials;
        // load them from configuration instead:
        //   Properties props = new Properties();
        //   props.put("user", "<user>");
        //   props.put("password", "<password>");
        //   session.read()
        //          .jdbc("jdbc:mysql://<host>:3306/spark", "task", props)
        //          .select("id", "task_id", "state")
        //          .show();
    }
}
