package staygraph

import com.graphhopper.matching.TrajPoint
import com.graphhopper.util.shapes.GHPoint
import org.apache.spark
import org.apache.spark.sql.catalyst.dsl.expressions.{DslExpression, StringToAttributeConversionHelper}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
import org.spark_project.dmg.pmml.True

import java.text.SimpleDateFormat
import java.util.Date
import java.util.regex.{Matcher, Pattern}
import scala.collection.JavaConverters._


object staygraph {

  /** Trajectory ETL driver.
    *
    * Reads raw trajectory lines (`user,epochMillis,lon,lat,...`), normalizes
    * each into a (user_id, lac, cell, lon, lat, time_id, bts_type) record,
    * keeps only records stamped on 2020-04-01, and writes them out as a
    * single headerless CSV part file.
    */
  def main(args: Array[String]): Unit = {
    // One SparkSession owns the one SparkContext. The original code built a
    // bare SparkContext first and then a SparkSession; getOrCreate only reused
    // that context (ignoring the builder's own appName), so a single session
    // configured with the effective app name is behaviorally equivalent.
    val spark = SparkSession
      .builder()
      .appName("SparkHiveText")
      .master("local")
      .getOrCreate()

    // Get the SparkContext from the session.
    val sc = spark.sparkContext

    val config = "hdfs://127.0.0.1:9000/target/sichuan.osm.pbf"
    println(config)

    // NOTE(review): this broadcast value is never read again, but constructing
    // the matcher loads the OSM graph from HDFS. Kept so that side effect (and
    // its cost) is unchanged — confirm whether it is still needed at all.
    val matcher = sc.broadcast(new BroadcastMatcher.BroadcastMatcher(config))

    val rdd = sc.textFile("file:///input/JUST-Lorry-Traj.txt")

    // Input layout per line: 0 = user id, 1 = epoch millis, 2 = lon, 3 = lat.
    // lac/cell/bts_type are not present in this feed and are filled with
    // placeholder constants.
    val trajList = rdd.map { line =>
      val f = line.split(",")
      (f(0), "lac", "cell", f(2).toDouble, f(3).toDouble, tranTimeToString(f(1)), "outdoor")
    }

    println(s"generateTrajData userList.count = ${trajList.count()}")

    // Same normalization as Rows for the DataFrame. Split each line exactly
    // once instead of once per column (the original re-split per field).
    val mapRDD = rdd.map { line =>
      val f = line.split(",")
      Row(getPhoneDigit(f(0)), "lac", "cell", f(2), f(3), tranTimeToString(f(1)), "outdoor")
    }

    // Every column is carried as a nullable string; typing happens downstream.
    val tableSchema = StructType(Array(
      StructField("user_id", StringType, nullable = true),
      StructField("lac_id", StringType, nullable = true),
      StructField("cell_id", StringType, nullable = true),
      StructField("lon", StringType, nullable = true),
      StructField("lat", StringType, nullable = true),
      StructField("time_id", StringType, nullable = true),
      StructField("bts_type", StringType, nullable = true)
    ))

    val df = spark.createDataFrame(mapRDD, tableSchema)

    // time_id is yyyyMMddHHmmssSSS, so a prefix match selects one calendar day.
    val dayOne = df.filter("time_id like '20200401%'")
    println(dayOne.count())

    // coalesce(1) forces a single output part file; the .csv path names the
    // output directory, not a file.
    dayOne.coalesce(1).write.option("header", "false").csv("file:///input/lorry-out.txt")
  }

  /** Formats an epoch-milliseconds string as `yyyyMMddHHmmssSSS`.
    *
    * A fresh SimpleDateFormat is created per call on purpose: the class is not
    * thread-safe and this method runs inside Spark task closures. The output
    * uses the JVM's default time zone — TODO confirm that is intended.
    *
    * @param tm epoch time in milliseconds, as a decimal string
    * @return the timestamp formatted as yyyyMMddHHmmssSSS
    */
  def tranTimeToString(tm: String): String =
    new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date(tm.toLong))

  /** Strips every non-digit character from a user id.
    *
    * Equivalent to the original Pattern/Matcher dance; the trailing trim was a
    * no-op on a digits-only result and has been dropped.
    *
    * @param userid raw id, possibly containing letters or punctuation
    * @return the digits of `userid` concatenated ("" if there are none)
    */
  def getPhoneDigit(userid: String): String =
    userid.replaceAll("[^0-9]", "")
}
