package com.xujia.demo

import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo job: copies a dimension table from PostgreSQL (via JDBC) into
 * StarRocks using the StarRocks Spark connector.
 *
 * Run locally; all connection settings are currently hard-coded below.
 */
object StarrocksTest {

  def main(args: Array[String]): Unit = {
    // A SparkSession is the single entry point since Spark 2.x and owns its
    // SparkContext — creating a separate SparkContext here was redundant.
    // The master was also set twice with conflicting values ("local[*]" in
    // SparkConf vs "local" on the builder, which won and disabled
    // parallelism); keep only "local[*]".
    val conf = new SparkConf().setMaster("local[*]").setAppName("test")
    val spark = SparkSession.builder().config(conf).getOrCreate()

    try {
      // 1. Read the source dimension table over JDBC.
      // NOTE(review): credentials are hard-coded in source — move them to
      // program arguments, environment variables, or a secrets store before
      // this leaves demo status.
      val jdbcDF = spark.read
        .format("jdbc")
        .option("url", "jdbc:postgresql://193.112.251.197:5432/datacenter")
        .option("dbtable", "dim.dim_xl_org")
        .option("user", "postgres")
        .option("password", "5LMHLV2eqP2zAST1")
        .load()

      // 2. Write to StarRocks by configuring the format as "starrocks".
      // Adjust FE HTTP/JDBC endpoints and the table identifier to match
      // your own environment.
      jdbcDF.write.format("starrocks")
        .option("starrocks.fe.http.url", "127.0.0.1:8030")
        .option("starrocks.fe.jdbc.url", "jdbc:mysql://127.0.0.1:9030")
        .option("starrocks.table.identifier", "test.dim_xl_org")
        .option("starrocks.user", "root")
        .option("starrocks.password", "")
        .mode("append")
        .save()
    } finally {
      // Release the session/context even if the read or write fails.
      spark.stop()
    }
  }

}
