package main.scala.demo.sogou

import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Try

/**
  * SogouLogMain
  *
  * @author zhangyimin
  *         2018-11-01 上午10:40
  * @version 1.0
  */
object SogouLogMain {

  /**
    * Filters Sogou query-log records (tab-separated, 6 fields:
    * accesstime, userID, keyword, rank, clickOrder, url) down to the rows
    * where rank == 1 and click order == 2, prints the matching count and a
    * small sample, and writes the result out comma-separated.
    *
    * @param args optional: args(0) = input path, args(1) = output path.
    *             Falls back to the original hard-coded HDFS paths when absent,
    *             so existing invocations keep working.
    */
  def main(args: Array[String]): Unit = {

    // spark-submit \
    //   --class demo.SogouLogMain \
    //   --master yarn-cluster \
    //   --executor-memory 4G \
    //   --num-executors 10 \
    //   sougoLog_mr.jar <inputPath> <outputPath>
    //
    // Hive DDL for loading the output:
    //   create table sogoulog (accesstime string, userID string, keyword string,
    //     no1 int, clickid int, url string)
    //     row format delimited fields terminated by ',';
    //   load data local inpath '<outputDir>/part-*' into table sogoulog;

    // Generalized: take paths from the command line when provided, keeping the
    // previous hard-coded HDFS locations as backward-compatible defaults.
    val inputPath =
      if (args.length > 0) args(0)
      else "hdfs://10.6.7.36:9000/data/input/sogou/SogouQ.txt"
    val outputPath =
      if (args.length > 1) args(1)
      else "hdfs://10.6.7.36:9000/data/output/sogou/spark/sogou"

    val conf = new SparkConf().setAppName("sogouLog").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      val lines = sc.textFile(inputPath).repartition(1)

      // Keep only well-formed records (exactly 6 tab-separated fields) whose
      // rank (field 3) is 1 and click order (field 4) is 2. Parsing via Try
      // skips rows with non-numeric fields instead of aborting the whole job
      // with a NumberFormatException, as the previous bare `.toInt` did.
      val matched = lines
        .map(_.split("\t"))
        .filter(_.length == 6)
        .filter { fields =>
          Try(fields(3).toInt).toOption.contains(1) &&
            Try(fields(4).toInt).toOption.contains(2)
        }

      // Cache: the RDD feeds three actions (count, take, saveAsTextFile);
      // without caching Spark would re-read and re-filter the input each time.
      val finalRes = matched.map(_.mkString(",")).cache()

      println(finalRes.count())
      // Previously the result of take(3) was silently discarded; print the
      // sample so the action actually serves its diagnostic purpose.
      finalRes.take(3).foreach(println)

      finalRes.saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even when an action fails.
      sc.stop()
    }
  }

}
