package cn.smegz

import java.util.Properties

import cn.smegz.clickhouse.{FWithdraw, FWithdrawRepo}
import com.datastax.spark.connector.rdd.CassandraRDD
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.junit.Test
import org.junit.runner.RunWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.context.junit4.SpringRunner

/**
  * Integration tests exercising Spark connectivity against three backends:
  * ClickHouse over JDBC, Cassandra via the DataStax Spark connector, and a
  * Spring Data JPA repository. Runs inside a Spring Boot test context which
  * supplies the SparkSession and repository beans.
  *
  * NOTE(review): the ClickHouse endpoint and credentials are hard-coded test
  * fixtures; consider externalizing them to test configuration.
  */
@RunWith(classOf[SpringRunner])
@SpringBootTest
class SparkConnectClick {

  // Shared ClickHouse JDBC settings — previously duplicated verbatim in
  // readTable() and writeTable(); kept here so the two tests cannot drift.
  private val ClickHouseUrl    = "jdbc:clickhouse://10.1.1.237:8123/qiandw_data"
  private val ClickHouseDriver = "ru.yandex.clickhouse.ClickHouseDriver"
  private val ClickHouseUser   = "default"
  private val ClickHousePass   = "111111"

  // Spring Data repository for the f_withdraw table (injected by the test context).
  @Autowired
  var springRepo : FWithdrawRepo = _

  // SparkSession bean provided by the Spring context (shared across tests).
  @Autowired
  var spark : SparkSession = _

  /** Reads the whole f_bid table from ClickHouse over JDBC and prints it. */
  @Test
  def readTable(): Unit = {
    val fBidDf = spark.read.format("jdbc")
      .option("url", ClickHouseUrl)
      .option("dbtable", "f_bid")
      .option("driver", ClickHouseDriver)
      .option("user", ClickHouseUser)
      .option("password", ClickHousePass)
      .load()
    fBidDf.show()
  }

  /**
    * Lists the table names of the qiandw_test keyspace by querying Cassandra's
    * system_schema.tables through the DataStax connector.
    */
  @Test
  def readTableCassandra(): Unit = {
    import com.datastax.spark.connector._
    val rTableSchema = spark.sparkContext.cassandraTable("system_schema", "tables")
      .where("keyspace_name = ?", "qiandw_test")
      .select("table_name")
    val rTableSchemaRdd: CassandraRDD[String] = rTableSchema.as((s: String) => s)
    val list = rTableSchemaRdd.collect().toList
    // println (was print): terminate the test output with a newline.
    println(list)
  }

  /**
    * Builds one FWithdraw row in memory, echoes it through a temp view, then
    * appends it to the ClickHouse f_withdraw table over JDBC.
    */
  @Test
  def writeTable(): Unit = {
    // Anonymous subclass used purely as a bean initializer; classOf[FWithdraw]
    // below drives the DataFrame schema, so the subclass is transparent to it.
    val newFWithdrawObj = new FWithdraw {
      this.qdw_id = "QDW123456"
      this.age = 21
      this.amount = 2500.00
      this.batch = "AS122-12345S-12158-RWE52"
      this.client = "PC"
      this.d_date = "2018-10-29"
      this.day_of_month = 12
      this.week_of_month = 1
      this.dep_id = 8
      this.dep_name = "运营中心"
      this.emp_id = 250
      this.emp_name = "520组合"
      this.gender = 23
      this.i_count = 6
      this.month_of_year = 6
      this.pass_amount = 2000.00
      this.pass_count = 2
      this.quarter = 12
    }
    val fWithdrawRdd = spark.sparkContext.parallelize(List(newFWithdrawObj))
    // cache(): the DataFrame is consumed twice (show + jdbc write) below.
    val fWithdrawDf = spark.createDataFrame(fWithdrawRdd, classOf[FWithdraw]).cache()
    // createOrReplaceTempView: plain createTempView throws if the view already
    // exists, e.g. when this test is re-run within the same Spark session.
    fWithdrawDf.createOrReplaceTempView("temple_value")
    spark.sql("select * from temple_value").show()
    val properties = new Properties()
    properties.setProperty("driver", ClickHouseDriver)
    properties.setProperty("user", ClickHouseUser)
    properties.setProperty("password", ClickHousePass)
    fWithdrawDf.write.mode(SaveMode.Append)
      .jdbc(ClickHouseUrl, "f_withdraw", properties)
    // Release the cached partitions now that both consumers have run.
    fWithdrawDf.unpersist()
  }

  /**
    * Spring Boot / JPA read path: verifies repository wiring by listing all
    * rows of f_withdraw through the injected Spring Data repository.
    */
  @Test
  def springJpaRead(): Unit = {
    import scala.collection.JavaConverters._
    println(springRepo.findAll().asScala)
  }
}
