package ctrip.utils

import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.{DataFrame, SQLContext}
import scala.io.Source
/**
 * Created by siyi0 on 2017/5/22.
 */
object ETLUtils {

  /**
   * Computes the average of every column of the ETL data set (skipping the
   * literal string 'NULL') and returns a map of column name -> average.
   *
   * Fixes vs. the original:
   *  - registers the temp table "etldata" (the queries reference it, but the
   *    registration was commented out, so every query failed),
   *  - collects the aggregate row instead of calling toString on the RDD
   *    handle (which only yields "MapPartitionsRDD[...]", never the value),
   *  - closes the header file and stops the SparkContext,
   *  - builds the result immutably instead of mutating vars.
   *
   * @return map from column title to its average rendered as a String
   *         ("NULL" when the query returns no row)
   */
  def fun(): Map[String, String] = {
    val sparkConf = new SparkConf().setAppName("ETLUtils") //.setMaster("spark://175.102.18.112:7077")
    val sc = new SparkContext(sparkConf)
    try {
      // Column names come from the header line of the local copy of the file.
      // NOTE(review): the header is read from /mnt/disk/data/ctrip/etl.txt but
      // the DataFrame loads /data/ctrip/etl.txt — confirm both point at the
      // same data set.
      val file = Source.fromFile("/mnt/disk/data/ctrip/etl.txt")
      val titles =
        try file.getLines.next.split("\t")
        finally file.close() // the original leaked this file handle

      val sqlContext = new SQLContext(sc)
      import com.databricks.spark.csv._
      val df_test = sqlContext.load("com.databricks.spark.csv",
        Map("path" -> """/data/ctrip/etl.txt""", "header" -> "true", "delimiter" -> "\t"))
      // Required: every per-column query below selects from "etldata";
      // without this registration they all fail with "table not found".
      df_test.registerTempTable("etldata")

      titles.map { title =>
        val sql = s"select avg($title) from etldata where $title<>'NULL'"
        // Collect the single aggregate row; RDD.toString only returns the
        // RDD's debug name, never the data.
        val avg = df_test.sqlContext.sql(sql).collect().headOption
          .map(row => String.valueOf(row.get(0)))
          .getOrElse("NULL")
        title -> avg
      }.toMap
    } finally {
      sc.stop() // release cluster resources even when a query throws
    }
  }

  /** Entry point: run the column-average job and print the resulting map. */
  def main(args: Array[String]): Unit = {
    print("func:", fun())
  }
}

