package com.eurlanda.spark.cassandra

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
  * Created by zhudebin on 2017/4/6.
  */
/**
  * Demo: read a Cassandra table through the Spark Cassandra connector
  * as a DataFrame and dump every row to stdout.
  *
  * Connects to the keyspace `ks`, table `test_special` on the cluster
  * configured via the `engine.spark.cassandra.*` options below.
  */
object DFTypeDemo {

  // NOTE: `: Unit =` replaces the deprecated procedure syntax
  // (`def main(args: Array[String]) { ... }`), which is removed in Scala 3.
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[4]").setAppName("test cassandra")

    val spark = SparkSession.builder().config(conf).getOrCreate()

    // Read the Cassandra table as a DataFrame. Connection/auth settings are
    // passed as data-source options rather than global Spark conf keys.
    val df = spark
      .read
      .format("org.apache.spark.sql.cassandra")
      .options(Map( "table" -> "test_special",
        "keyspace" -> "ks",
        "cluster" -> "Test Cluster",
        "cassandra.validate_type" -> "1",
        "engine.spark.cassandra.username" -> "cassandra",
        "engine.spark.cassandra.password" -> "cassandra",
        "engine.spark.cassandra.host" -> "192.168.137.128",
        "engine.spark.cassandra.port" -> "9042"))
      .load() // This Dataset will use a spark.cassandra.input.size of 128

    // collect() pulls the full table to the driver — acceptable for a demo,
    // but would OOM on large tables.
    val arr = df.collect()
    println("---------总数-----------", arr.length)
    arr.foreach(r => println("------------------------" + r))

    // Release the underlying SparkContext before exiting.
    spark.close()
  }

}
