package com.cnlive.logpare.comm

import com.cnlive.logpare.util.Constant
import com.cnlive.logpare.util.IPSeekerExt
import java.text.SimpleDateFormat
import java.util.Locale
import scala.collection.mutable.HashMap
import org.apache.spark.sql.Row
import com.cnlive.logpare.util.ParseLogUtil
import scala.collection.mutable.HashSet
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf

trait CommRun {
   // NOTE(review): SimpleDateFormat is NOT thread-safe; these shared instances must
   // not be used from multiple threads concurrently — consider java.time.format.DateTimeFormatter.
   /** Parses nginx access-log timestamps, e.g. "19/Jun/2017:13:00:00". */
   val nginxDateFormat = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH)
   /** Formats timestamps for table naming, e.g. "20170619130000". */
   val tableNinxDateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ENGLISH)

   // Runtime environment: 1 = local, 2 = local test env, 3 = real test env
   /**
    * Reads the environment id from the first CLI argument; defaults to 1 (local)
    * when no arguments are given. Throws NumberFormatException for a non-numeric arg.
    */
   def getEnv(args: Array[String]) = args match {
     case Array(x, _*) => x.toInt
     case _            => 1
   }

   /**
    * Builds the Spark application name: appends "_<date>" to the default name
    * when a non-empty log date is supplied, otherwise returns the default as-is.
    * (Param name "defaulf" is a legacy typo, kept for caller compatibility.)
    */
   def getAppName(defaulf: String, date: String) =
     if (date.nonEmpty) s"${defaulf}_$date" else defaulf

   // Date of the log files to process: second CLI argument, or "" when absent.
   def getDate(args: Array[String]) = args match {
     case Array(_, y, _*) => y
     case _               => ""
   }

   val warehouseLocation = "spark-warehouse"

   /**
    * Builds a SparkSession for the given environment: a plain session for local
    * runs (env 1), otherwise a Hive-enabled session with an explicit warehouse dir.
    */
   def getSparkSql(env: Int, conf: SparkConf) = env match {
     case 1 =>
       SparkSession.builder().config(conf).getOrCreate()
     case _ =>
       SparkSession.builder().config(conf)
         .config("spark.sql.warehouse.dir", warehouseLocation)
         .enableHiveSupport()
         .getOrCreate()
   }

   /**
    * Path of the qqwry IP-geolocation database file for the given environment.
    * Returns "" for environments that need no external file (e.g. local, env 1).
    */
   def getQqwryPath(env: Int) = env match {
     case 2 => Constant.qqwryLocalServer
     case 3 => Constant.qqwryServer
     case _ => ""
   }

   /**
    * Input path for a given (environment, log type) pair.
    * FIX: the original match was non-exhaustive despite the declared String return
    * type — any unlisted combination threw scala.MatchError at runtime. Unknown
    * combinations now yield "", consistent with getQqwryPath's fallback.
    */
   def getFilePath(env: Int, logType: String, date: String): String = (env, logType) match {
     case (1, "app.js") => "file:///F:/工作资料/appceshi/*/*"
     case (2, "app.js") => "hdfs://sxt/spark/test/input/logs/app/*"
     case (3, "app.js") => "hdfs://sxt/nginx_logs/app/offline/" + date + "/*"
     case (1, "playBy") => "file:///F:/工作资料/play/2017-06-19/*"
     case (2, "playBy") => "hdfs://sxt/spark/test/input/logs/play/*"
     case (3, "playBy") => "hdfs://sxt/nginx_logs/play/offline/" + date + "/*"
     case (1, "page.js") => "file:///F:/工作资料/pagenew/82page_access_201706261300.log"
     case (2, "page.js") => "hdfs://sxt/spark/test/input/logs/page/*"
     case (3, "page.js") => "hdfs://sxt/nginx_logs/page/offline/" + date + "/*"
     case _ => ""
   }

   /**
    * Obtains the IP-resolution helper: the local environment (1) uses the built-in
    * initializer; all others load the qqwry database from the supplied path.
    * FIX: cases 2 and 3 were duplicated verbatim and the match was non-exhaustive
    * (MatchError for any other env id) — merged into an exhaustive default.
    */
   def getIpSeekerExt(env: Int, qqwryPath: String): IPSeekerExt = env match {
     case 1 => IPSeekerExt.getInit()
     case _ => IPSeekerExt.getInit(true, qqwryPath)
   }
}