package sparksql

import org.apache.spark.sql.SparkSession

object SparkFormatJob {

  /** Entry point: reads a raw access log, reformats each line into a
    * tab-separated "parsedTime\turl\tip\ttraffic" record, and saves the
    * result as a Hadoop object file.
    *
    * @param args args(1) is the output path for `saveAsObjectFile`.
    *             NOTE(review): the input path is hard-coded while the output
    *             comes from args — consider reading the input from args(0).
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("SparkFormatJob")
      .master("local[2]")
      .getOrCreate()

    try {
      val access = spark.sparkContext
        .textFile("F:\\lzc\\SparkSQL\\sparksql\\src\\main\\scala\\sparksql\\10000.log")

      access.map { line =>
        // Space-splitting is fragile for quoted/bracketed fields; the fixed
        // indices below assume a combined-log-style layout — TODO confirm
        // against the actual data.
        val fields  = line.split(" ")
        val ip      = fields(0)
        val time    = fields(3) + " " + fields(4) // bracketed timestamp spans two tokens
        val url     = fields(11).replace("\"", "")
        val traffic = fields(9)
        DateUtils.parse(time) + "\t" + url + "\t" + ip + "\t" + traffic
      }.saveAsObjectFile(args(1))
    } finally {
      // Release the session even when the job fails mid-way.
      spark.stop()
    }
  }
}
