import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * Exports PostgreSQL detail tables (Douyin / Douyin-live, 2021) to OBS as ORC.
 * (Original note: "ES update after the tourism county-level address change" —
 * NOTE(review): the ES settings below are configured but no ES read/write occurs here; verify the comment is not stale.)
 */
object dws_2_obs {

  /**
   * Reads one PostgreSQL table over JDBC into a DataFrame.
   *
   * @param spark          active SparkSession
   * @param qualifiedTable schema-qualified table name, e.g. {{{detail."o2o_detail_douyin_2021"}}}
   * @return the table contents as a DataFrame
   */
  private def readPgTable(spark: SparkSession, qualifiedTable: String): DataFrame = {
    spark.read.format("jdbc")
      .option("url", "jdbc:postgresql://192.168.2.234:8000/postgres")
      .option("user", "sysadmin")
      // FIXME(review): hard-coded credential; move to a config file / secret store.
      .option("password", "Bigdata@123")
      .option("dbtable", qualifiedTable)
      .load()
  }

  /**
   * Writes a DataFrame as ORC to the given OBS (s3a) path,
   * coalesced to 10 partitions to limit output file count.
   */
  private def writeOrc(df: DataFrame, path: String): Unit =
    df.coalesce(10).write.orc(path)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "500")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      // Elasticsearch connection settings.
      // NOTE(review): no ES read/write happens in this job; these may be leftovers — confirm before removing.
      .config("es.nodes", "192.168.1.29")
      .config("es.index.read.missing.as.empty", "true")
      .config("es.net.http.auth.user", "elastic")
      // FIXME(review): hard-coded credential; move to a config file / secret store.
      .config("es.net.http.auth.pass", "changeme")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("spark.sql.crossJoin.enabled", "true")
      .master("local[*]")
      //      .enableHiveSupport()
      .getOrCreate()

    // OBS (Huawei Cloud object storage) access via the s3a connector.
    val sc = spark.sparkContext
    // FIXME(review): hard-coded access/secret keys; load from environment or a credentials provider.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // (table in PostgreSQL, target ORC path on OBS) pairs to export.
    val exports = Seq(
      ("""detail."o2o_detail_douyin_2021"""", "s3a://dws-data/g_data/douyin/oss/2021/6"),
      ("""detail."o2o_detail_douyinlive_2021"""", "s3a://dws-data/g_data/douyin/dws/2021/6")
    )

    for ((table, path) <- exports)
      writeOrc(readPgTable(spark, table), path)

    // Stops the SparkSession and its underlying SparkContext.
    spark.stop()
  }

}
