package com.acmedcare.framework.spark.sample;

import static org.apache.spark.sql.functions.col;

import java.util.Properties;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

/**
 * One-shot Spark batch job that joins the passport base table with its detail
 * table in the {@code bass2} MySQL database and overwrites the
 * {@code visit_doctor_spark} table in the {@code acmedcare-visit} database
 * with a doctor-shaped projection of the result.
 *
 * <p>NOTE(review): connection URLs and credentials are hard-coded below. They
 * should be supplied via spark-submit configuration or environment variables
 * before this runs anywhere but a throwaway dev environment.
 */
public class MySqlSpark {

  /** Source database holding the passport tables. */
  private static final String SOURCE_URL =
      "jdbc:mysql://192.168.1.159:3306/bass2?useUnicode=true&characterEncoding=utf-8&allowMultiQueries=true&useAffectedRows=true&useSSL=false";

  /** Target database receiving the converted doctor rows. */
  private static final String TARGET_URL =
      "jdbc:mysql://192.168.1.159:3306/acmedcare-visit?useUnicode=true&characterEncoding=utf-8&allowMultiQueries=true&useAffectedRows=true&useSSL=false";

  /** Shared JDBC credentials for both databases (see class-level NOTE). */
  private static final String JDBC_USER = "root";

  private static final String JDBC_PASSWORD = "jzyl2019yw";

  /**
   * Runs the full extract-join-load pipeline. Side effects: creates (or
   * reuses) a SparkSession, registers the temp view {@code passport_view},
   * and overwrites {@code visit_doctor_spark} in the target database.
   * The session is always stopped, even when the job fails.
   */
  protected static void convertMySqlData() {

    SparkSession spark =
        SparkSession.builder()
            .appName("acmedcare-spark-sql-demo")
            .config("spark.cores.max", 10)
            .getOrCreate();

    // Guarantee the session is released on any failure; the original code
    // leaked it if the read/join/write threw.
    try {
      Dataset<Row> passportBase =
          readTable(spark, "acmedcare_passport_base", "passport_base");
      Dataset<Row> passportDetail =
          readTable(spark, "acmedcare_passport_normal_detail", "extension");

      // Inner-join base and detail on passport_uid, keeping only rows that
      // have a non-null full_name in the detail table.
      Dataset<Row> passportView =
          passportBase
              .join(
                  passportDetail,
                  col("passport_base.passport_uid").equalTo(col("extension.passport_uid")),
                  "inner")
              .select(
                  "passport_base.passport_uid",
                  "extension.full_name",
                  "passport_account",
                  "mobile")
              .filter(col("full_name").isNotNull());

      passportView.createOrReplaceTempView("passport_view");

      // spark.sql(...) replaces the deprecated sqlContext() accessor.
      // sex/phone/department_id are placeholder literals carried over from
      // the original query — presumably stubs; confirm before production.
      Dataset<Row> convertData =
          spark.sql(
              "select passport_uid as doctor_id,full_name  as doctor_name,1 sex,10086 phone,362267324195073 department_id from passport_view");

      Properties properties = new Properties();
      properties.put("user", JDBC_USER);
      properties.put("password", JDBC_PASSWORD);
      convertData.write().mode(SaveMode.Overwrite).jdbc(TARGET_URL, "visit_doctor_spark", properties);
    } finally {
      spark.stop();
    }
  }

  /**
   * Reads one table from the source database over JDBC and aliases it for
   * use in joins. Factors out the option boilerplate duplicated per table.
   *
   * @param spark active session used to build the reader
   * @param table source table name in the {@code bass2} database
   * @param alias dataset alias referenced by join/select column paths
   * @return the loaded, aliased dataset
   */
  private static Dataset<Row> readTable(SparkSession spark, String table, String alias) {
    return spark
        .read()
        .format("jdbc")
        .option("url", SOURCE_URL)
        .option("user", JDBC_USER)
        .option("password", JDBC_PASSWORD)
        .option("dbtable", table)
        .load()
        .alias(alias);
  }
}
