import org.apache.spark.{SparkContext, SparkConf}


/**
  * Created by wuyunpeng on 2016/4/11.
  */
/**
  * Driver entry point: runs the nginx-log ETL job over the input path given
  * as the first command-line argument.
  *
  * Created by wuyunpeng on 2016/4/11.
  */
object LogsETL {

  def main(args: Array[String]): Unit = {

    // Fail fast with a usable message instead of an opaque
    // ArrayIndexOutOfBoundsException when no input path is supplied.
    require(args.nonEmpty, "Usage: LogsETL <input-path>")

    // setIfMissing lets `spark-submit --master ...` override the master on a
    // cluster while still defaulting to local[4] for local development runs.
    val conf = new SparkConf()
      .setAppName("logsETL")
      .setIfMissing("spark.master", "local[4]")
    val sc = new SparkContext(conf)

    try {
      // Input record format: udid,key=value,key=value,timestamp
      // e.g. 0001,method=abc,version=2.0,udid=0001,1459914075000
      NginxLogsEtl.etl(args(0), sc)
    } finally {
      // Always release the SparkContext, even if the ETL fails.
      sc.stop()
    }
  }

}
