
import org.apache.spark.sql.SparkSession


/**
  *
  *  Data preprocessing stage
  */

/**
  * Preprocessing job for mobile-network traffic data.
  *
  * Reads base-station coordinates and raw communication records from CSV,
  * drops rows with invalid/placeholder values, joins records to station
  * coordinates, converts epoch timestamps, removes low-activity IMSIs
  * (fewer than 20 records), and writes the result as a single CSV file.
  */
object PreprocessingModel {

  def main(args: Array[String]): Unit = {

    // Program entry point: local Spark session using all available cores.
    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("jiaotong")
      .getOrCreate()

    // Optional CLI paths (currently unused; paths are hard-coded below).
    // NOTE(review): if re-enabled, Scala `args` is 0-indexed — these should
    // be args(0), args(1), args(2), not args(1..3).
    // val sjg_path = args(1)    // base-station longitude/latitude data
    // val data_path = args(2)   // raw communication records
    // val result_path = args(3) // output path for results

    // Load static base-station data: longitude, latitude, and the
    // "lac-cell" key used for the join. "000" cells are read as null.
    spark.read.format("csv")
      .option("delimiter", ",") // field separator
      .option("quote", "")      // FIX: was misspelled "qoute", so the option silently had no effect
      .option("nullValue", "000")
      .load("input/jwd.csv")
      .select("_c0", "_c1", "_c2")
      .toDF("longitude", "latitude", "laci")
      .createTempView("static_table")

    // Load raw communication records.
    spark.read.format("csv")
      .option("delimiter", ",")   // field separator
      .option("quote", "")        // FIX: was misspelled "qoute"
      .option("nullValue", "000") // treat "000" placeholders as null
      .load("input/yuanshi.csv")
      .select("_c0", "_c1", "_c2", "_c3", "_c4")
      .toDF("time_id", "imsi", "lac_id", "cell_id", "phone")
      .createTempView("traffic_table")

    // Pre-clean: drop rows whose IMSI is the "000" placeholder or contains
    // garbage characters, or whose lac/cell id is the placeholder.
    spark.sql(
      """
        |select * from traffic_table
        |where
        |imsi != '000'
        |and imsi not like  '%#%'
        |and imsi not like '%*%'
        |and imsi not like '%^%'
        |and lac_id != '000'
        |and cell_id != '000'
        |
      """.stripMargin).createTempView("traffic_table_end")

    // Join records to station coordinates on "lac-cell", convert the epoch
    // timestamp to 'yyyyMMddHHmmss', and drop rows with placeholder
    // coordinates; de-duplicate the result.
    spark.sql(
      """
        |
        |select
        |   from_unixtime(substr(tt.time_id,0,10),'yyyyMMddHHmmss') as time_id,
        |   tt.imsi,
        |   st.longitude,
        |   st.latitude,
        |   tt.lac_id,
        |   tt.cell_id
        | from traffic_table_end tt
        | join static_table st
        | on concat(tt.lac_id,'-',tt.cell_id) = st.laci
        | where
        | st.longitude != '000'
        | and st.latitude !='000'
        |
      """.stripMargin)
      .distinct()
      .createTempView("result")

    // Keep only records after 2018-10-03 and number each IMSI's records
    // in ascending time order.
    val data = spark.sql(
      """
        |
        |select
        |  time_id,
        |  imsi,
        |  longitude,
        |  latitude,
        |  lac_id,
        |  cell_id,
        |  row_number() over (partition by imsi order by time_id) number
        |  from  result
        |  where
        |  time_id > '20181003000000'
        |
      """.stripMargin)

    data.createTempView("data_table")

    // IMSIs with fewer than 20 records — treated as noise to be excluded.
    val imsi20 = spark.sql(
      """
        |select dt.imsi from (
        |   select imsi,count(1) count
        |  from data_table
        |group by imsi ) dt
        | where dt.count<20
      """.stripMargin)

    imsi20.createTempView("imsi20_table")
    spark.sql("SELECT * FROM imsi20_table").show(100) // debug: preview excluded IMSIs

    // Anti-join: keep only records whose IMSI is NOT in the low-activity set
    // (left join + "is null" filter).
    val result = spark.sql(
      """
        |select
        |   dt.time_id,
        |   dt.imsi,
        |   dt.longitude,
        |   dt.latitude,
        |   dt.lac_id,
        |   dt.cell_id,
        |   dt.number
        |  from
        |data_table dt
        |left join imsi20_table it
        |on dt.imsi = it.imsi
        | where
        |  it.imsi is null
        |
      """.stripMargin)

    // Collapse to one partition so the output is a single CSV file.
    result.coalesce(1).write.csv("output3/out")

    // FIX: release the session and its resources before exiting.
    spark.stop()
  }

}
