package cn.lecosa.spark.hive

import java.io.FileInputStream
import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.hive.HiveContext

object HiveDemo1 {
  /** Demo entry point: loads a properties file whose path is supplied via the
    * `cfg_file` environment variable, then queries Hive through a HiveContext
    * and prints a simple projection of `lecosa.emp`.
    *
    * The config path is exported by the launching shell script, e.g.
    * `export cfg_file=/path/to/app.properties`.
    */
  def main(args: Array[String]): Unit = {
    val path = System.getenv("cfg_file")
    // Fail fast with a clear message instead of an NPE inside FileInputStream.
    require(path != null, "environment variable 'cfg_file' is not set")
    println(path)

    val properties = new Properties()
    val in = new FileInputStream(path)
    try {
      properties.load(in)
    } finally {
      // BUG FIX: the stream was never closed (resource leak).
      in.close()
    }
    println("name===" + properties.getProperty("name"))

    val conf = new SparkConf().setAppName(this.getClass.getName).setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val hiveContext = new HiveContext(sc)
      hiveContext.sql("show databases").show()

      // BUG FIX: the original query ended with a bare `limit` (no row count),
      // which is a SQL syntax error at runtime.
      val res = hiveContext.sql("select * from lecosa.emp limit 10")
      res.show()

      import hiveContext.implicits._
      // Concatenate empno + ename + job per row (Int + String yields String).
      val res1 = res.map(row =>
        row.getAs[Int]("empno") + row.getAs[String]("ename") + row.getAs[String]("job"))

      val res2 = res1.rdd.collect()
      res2.foreach(x => println("___  " + x))

      // BUG FIX: printing an Array directly yields its identity hash
      // (e.g. [Ljava.lang.String;@1a2b3c); mkString shows the actual contents.
      println("====" + res2.mkString(", "))
    } finally {
      // Always release Spark resources, even if a query above throws.
      sc.stop()
    }
  }
}