package com.xwj.myclass

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by thinkjoy on 2017/8/9.
  */
/**
  * Created by thinkjoy on 2017/8/9.
  *
  * Builds Spark / Spark Streaming / Spark SQL contexts against a fixed remote
  * standalone cluster, and exposes a JDBC reader for MySQL tables.
  *
  * NOTE(review): cluster address, jar path, and DB credentials are hard-coded;
  * consider moving them to external configuration.
  */
class MyConnection {
  var conf: SparkConf = null
  var spark: SparkContext = null
  var stream: StreamingContext = null
  var sqlContext: SQLContext = null
  // JDBC endpoint of the MySQL instance read by SparkSqlConnect.
  val url = "jdbc:mysql://139.199.178.182:3306/sparktest?useUnicode=true&characterEncoding=UTF-8&useSSL=false&zeroDateTimeBehavior=convertToNull"
  val username = "xuweijie"
  val password = "xuweijie"
  val driver = "com.mysql.jdbc.Driver"

  /**
    * Builds the SparkConf shared by all connect methods.
    * (This configuration was previously duplicated verbatim in three places.)
    */
  private def buildConf(): SparkConf =
    new SparkConf()
      .setAppName("ScalaSpark")
      .setMaster("spark://192.168.0.78:7077")
      .set("spark.executor.memory", "512m")
      .setJars(List("G:\\Scala_project\\ScalaSpark\\out\\artifacts\\ScalaSpark_jar\\ScalaSpark.jar"))

  /**
    * Connects to the remote Spark cluster.
    *
    * @return the live SparkContext, or null when the connection fails
    *         (null-on-failure contract kept for existing callers)
    */
  def connect(): SparkContext = {
    try {
      conf = buildConf()
      spark = new SparkContext(conf)
      println(spark.sparkUser + "连接成功")
      spark
    } catch {
      // A Scala binary-version mismatch between driver and cluster surfaces
      // as NoSuchMethodError, hence this dedicated case.
      case e: NoSuchMethodError =>
        // Fix: the original concatenated e.printStackTrace() (Unit) onto the
        // message, printing a spurious "()" — log message and trace separately.
        println("scala版本不同导致失败")
        e.printStackTrace()
        null
      case e: Exception =>
        println("连接失败")
        e.printStackTrace()
        null
    }
  }

  /**
    * Initializes a Spark Streaming context with a 2-second batch interval.
    *
    * @return the StreamingContext, or null when the connection fails
    *         (null-on-failure contract kept for existing callers)
    */
  def StreamConnect(): StreamingContext = {
    try {
      conf = buildConf()
      stream = new StreamingContext(conf, Seconds(2))
      stream
    } catch {
      case e: NoSuchMethodError =>
        println("scala版本不同导致失败")
        e.printStackTrace()
        null
      case e: Exception =>
        println("连接失败")
        e.printStackTrace()
        null
    }
  }

  /**
    * Connects via Spark SQL and loads one MySQL table over JDBC.
    *
    * @param tablename name of the table to read from the configured database
    * @return a DataFrame backed by the JDBC table
    */
  def SparkSqlConnect(tablename: String): DataFrame = {
    conf = buildConf()
    spark = new SparkContext(conf)
    sqlContext = new SQLContext(spark)
    val properties = new Properties()
    properties.put("driver", driver)
    properties.put("user", username)
    properties.put("password", password)
    // val instead of var: the result is never reassigned.
    val result = sqlContext.read.jdbc(url, tablename, properties)
    result
  }
}
