package practice.library.spark_cassandra_connector

import org.apache.spark.{SparkConf, SparkContext}
import utility.CustomLogger

/**
  * Created by lovepocky on 16/7/15.
  */
object test extends CustomLogger {

	/** Entry point: runs the connector quick-start example. */
	def main(args: Array[String]): Unit = {
		quickStartGuide()
	}

	/**
	  * Reads the `test.kv` Cassandra table through the Spark Cassandra connector
	  * and logs the row count, the first row, and the sum of the `value` column.
	  *
	  * Side effects only (logging); connects to a hard-coded cluster at
	  * 192.168.1.160 — NOTE(review): consider making host/master configurable.
	  */
	def quickStartGuide(): Unit = {
		loggers.console.info(s"Example from https://github.com/datastax/spark-cassandra-connector/blob/master/doc/0_quick_start.md")

		// `true` loads spark.* system properties as defaults.
		val conf = new SparkConf(true)
			.set("spark.cassandra.connection.host", "192.168.1.160")
			//.set("spark.executor.extraLibraryPath", "/opt/spark-extra/spark-cassandra-connector_2.11-1.6.0.jar")
			//.set("spark.executor.extraClassPath", "/opt/spark-extra/spark-cassandra-connector_2.11-1.6.0.jar:/opt/spark-extra/cassandra-driver-core-3.0.0.jar")
			.set("spark.executor.extraClassPath", "/opt/spark-extra/*")
			//.set("spark.executor.extraClassPath", "/opt/spark-extra/spark-cassandra-connector-1.6.0-s_2.11.jar")

		val sc = new SparkContext("spark://192.168.1.160:7077", "test", conf)

		// Ensure the context is always released, even if a Cassandra call throws;
		// previously the SparkContext was never stopped (resource leak).
		try {
			// Brings in the `cassandraTable` enrichment on SparkContext.
			import com.datastax.spark.connector._

			val rdd = sc.cassandraTable("test", "kv")
			loggers.console.info(rdd.count().toString)
			loggers.console.info(rdd.first().toString())
			loggers.console.info(rdd.map(_.getInt("value")).sum().toString)
		} finally {
			sc.stop()
		}
	}
}
