package cn.ymatrix.hadoopEcology.SparkOperationYmatrix;

import cn.ymatrix.utils.Config;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;

import java.util.Properties;

import static org.apache.spark.sql.functions.col;

/**
 * Demo of basic YMatrix operations through the Spark JDBC data source:
 * read a table, query it (column projection, SQL temp view, filter API),
 * and emulate UPDATE/DELETE by transforming the DataFrame and writing the
 * result to a new table (JDBC data source cannot update/delete in place).
 */
public class SparkOperationForYMatrix {

  // Target table for the emulated UPDATE result.
  // NOTE(review): these should be `private static final` constants, but they are
  // kept public and mutable in case external code assigns them — confirm no
  // outside writers exist, then lock them down.
  public static String updateTableName = "updatetable";
  // Target table for the emulated DELETE result.
  public static String deleteTableName = "deletetable";

  public static void main(String[] args) {

    // JDBC connection credentials and driver class, read from project config.
    Properties connectionProperties = new Properties();
    connectionProperties.put("user", Config.getUserName());
    connectionProperties.put("password", Config.getPassWord());
    connectionProperties.put("driver", Config.getDRIVER_CLASS_NAME());

    String url = Config.getUrl();
    String tableName = Config.getTableName();

    // master("local") is for local testing only; drop it (and the placeholder
    // config option) when submitting to a real cluster.
    SparkSession spark =
        SparkSession.builder()
            .appName("SparkPostgresqlJdbc")
            .config("spark.some.config.option", "some-value")
            .master("local")
            .getOrCreate();

    // Load the whole source table over JDBC into a DataFrame.
    Dataset<Row> jdbcDF = spark.read().jdbc(url, tableName, connectionProperties);

    // Query by column projection.
    selectAllFields(jdbcDF);

    // Query through a SQL temp view.
    selectSqlAllFields(jdbcDF, spark);

    // Query through the filter API.
    selectFilterQuery(jdbcDF);

    // Emulated UPDATE (written to updateTableName).
    updateTable(jdbcDF, connectionProperties, url);

    // Emulated DELETE (written to deleteTableName).
    deleteTableData(jdbcDF, connectionProperties, url);

    // Release Spark resources.
    spark.stop();
  }

  /**
   * Shows the first 10 rows projected to (ts, vin, t1, t2).
   *
   * <p>BUG FIX: the original discarded the {@code select(...).limit(10)} result
   * (Datasets are immutable) and then showed the full, unprojected DataFrame;
   * the projected/limited result is now the one displayed.
   */
  private static void selectAllFields(Dataset<Row> jdbcDF) {
    System.out.println("按照字段查询数据的前10条....");
    jdbcDF.select("ts", "vin", "t1", "t2").limit(10).show();
  }

  /** Shows up to 10 rows whose {@code t1} column is not equal to "1". */
  private static void selectFilterQuery(Dataset<Row> jdbcDF) {
    System.out.println("按照指定字段的值条件查看数据....");
    Dataset<Row> finalData = jdbcDF.filter(col("t1").notEqual("1")).limit(10);
    finalData.show();
  }

  /** Registers the DataFrame as temp view "test" and queries it with SQL. */
  private static void selectSqlAllFields(Dataset<Row> jdbcDF, SparkSession spark) {
    System.out.println("按照 SQL 语句 查看数据....");
    jdbcDF.createOrReplaceTempView("test");
    Dataset<Row> selectResults = spark.sql("select * from test where vin='1' ");
    selectResults.show();
  }

  /**
   * Emulates {@code UPDATE test SET t1 = 'test1' WHERE vin = '1'} by rewriting
   * the {@code t1} column and writing the result to {@link #updateTableName}.
   */
  private static void updateTable(
      Dataset<Row> jdbcDF, Properties connectionProperties, String url) {
    System.out.println("update 原始数据....");
    jdbcDF.show();

    // CONSISTENCY FIX: compare vin against the string "1", matching every other
    // query in this class (the original compared against the int 1).
    Dataset<Row> updatedData =
        jdbcDF.withColumn(
            "t1", functions.when(col("vin").equalTo("1"), "test1").otherwise(col("t1")));

    System.out.println("update 更新后的数据....");
    updatedData.show();

    // NOTE(review): default SaveMode is ErrorIfExists, so a second run fails if
    // the target table already exists — consider .mode(SaveMode.Overwrite).
    updatedData.write().jdbc(url, updateTableName, connectionProperties);
  }

  /**
   * Emulates {@code DELETE FROM test WHERE vin = 1} by filtering out the rows
   * and writing the remainder to {@link #deleteTableName}.
   */
  private static void deleteTableData(
      Dataset<Row> jdbcDF, Properties connectionProperties, String url) {

    System.out.println("delete 原始数据....");
    jdbcDF.show();

    System.out.println("delete 更新后的数据....");
    Dataset<Row> newData = jdbcDF.filter(" vin != 1");
    newData.show();
    // NOTE(review): default SaveMode is ErrorIfExists — see updateTable above.
    newData.write().jdbc(url, deleteTableName, connectionProperties);
  }
}
