package cn.lecosa.spark.hive

import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import java.util.logging.Level
import java.util.logging.Logger
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.SaveMode
import java.util.Properties
object Mysql2Hive {
  /**
   * Copies the MySQL table `test.employer` into the Hive table `lecosa.emp`
   * (drop-and-recreate via CTAS), then runs `DESC TABLE` on the temp view and
   * appends a constant "tabname" column to that listing for display.
   *
   * Fixes over the previous revision:
   *  - Spark logs through log4j/slf4j, so the `java.util.logging` level calls
   *    had no effect; `sc.setLogLevel("WARN")` is the supported way to quiet
   *    Spark output.
   *  - The JDBC DataFrame was read through a separate plain `SQLContext`
   *    while the CTAS ran on the `HiveContext`; temp views are scoped to the
   *    context/session that created them, so the Hive-side
   *    `select * from emp` could not reliably see the view. All SQL now goes
   *    through the single `HiveContext`.
   *  - `sc.stop()` now runs in a `finally` block so the context is released
   *    even when a SQL statement fails.
   */
  def main(args: Array[String]): Unit = {
    // JDBC connection settings kept in one place so the (currently
    // commented-out) write-back below reuses the same URL/credentials.
    val jdbcUrl = "jdbc:mysql://park01:3306/test"
    val prop = new Properties()
    prop.put("user", "root")
    prop.put("password", "root")

    val conf = new SparkConf().setAppName(this.getClass.getName).setMaster("local[*]")
    val sc = new SparkContext(conf)
    // Spark uses log4j internally; java.util.logging levels are ignored, so
    // reduce noise through the SparkContext API instead.
    sc.setLogLevel("WARN")

    try {
      val hiveContext = new HiveContext(sc)
      hiveContext.sql("show databases").show()
      hiveContext.sql("use lecosa").show()
      hiveContext.sql("show tables").show()

      // Read the source table over JDBC with the SAME context that runs the
      // CTAS below — temp views are only visible to the context that
      // registered them.
      val jdbcdf = hiveContext.read
        .format("jdbc")
        .options(Map(
          "url" -> jdbcUrl,
          "driver" -> "com.mysql.jdbc.Driver",
          "dbtable" -> "test.employer",
          "user" -> "root",
          "password" -> "root"))
        .load()
      jdbcdf.createTempView("emp")

      hiveContext.sql(" DROP TABLE IF EXISTS lecosa.emp ").show()
      hiveContext.sql("create table lecosa.emp as select * from emp").show()

      // Add a derived column to the DESC output.
      import org.apache.spark.sql.functions._
      val columnDf = hiveContext.sql("desc  TABLE emp ")
      // NOTE(review): this UDF ignores its input column and always yields
      // "good" — kept as-is to preserve output; confirm whether
      // `str.toLowerCase` was the actual intent.
      val addCol = udf((str: String) => "good".toLowerCase)
      val newDf = columnDf.withColumn("tabname", addCol(columnDf("col_name")))
      newDf.show()
      // newDf.write.mode(SaveMode.Append).jdbc(jdbcUrl, "emp", prop) // target table need not pre-exist
    } finally {
      // Always release the SparkContext, even on SQL failure.
      sc.stop()
    }
  }
}


