package com.central.test

import java.util

import scala.util.control.NonFatal

import com.alibaba.fastjson.{JSONArray, JSONObject}
import com.central.controller.ActivityTime
import com.central.service.{FollowService, FollowingConditionEntity, FollowingSiteConditionEntity}
import com.central.utils.LoadESData
import org.apache.spark.sql.SparkSession

object TestFollow {

  /** Local smoke-test driver for [[FollowService.followAll]].
    *
    * All query parameters are hard-coded below; they were originally parsed
    * from a JSON job descriptor (see the commented-out `jsb` lines). The job
    * runs only when the program is started with no arguments — any argument
    * makes it a no-op, matching the original (empty) else branch.
    */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      // Held in a var so the finally block can stop it; assigned once below.
      var spark: SparkSession = null
      try {
        val jobId = "1" // jsb.get("jobId") + ""

        // Query identity: a license plate (alternative IMSI sample kept below).
        val idNumberType = "licenseplate"
        val idNumber = "5_1_A1J278"
        // val idNumberType = "imsi"
        // val idNumber = "460098832876932"

        // Build the activity-time window: a day range plus a time-of-day range.
        val acttime = new JSONArray() // fix: was `var`, never reassigned
        val time = new JSONObject()
        time.put("days", "2020-03-01~2020-03-11")
        time.put("time", "10:43:06~11:43:06")
        acttime.add(time)
        val activityTimes = acttime.toArray().map { jobj =>
          // fastjson stores each element as a java.util.Map under the hood.
          val timeMap = jobj.asInstanceOf[util.Map[String, Object]]
          val day = String.valueOf(timeMap.get("days"))
          val tod = String.valueOf(timeMap.get("time"))
          ActivityTime(day, tod)
        }.toList

        val beginTime = 1582010709000L // (jsb.get("begintime") + "").toLong
        val endTime = 1584516309000L   // (jsb.get("endtime") + "").toLong
        val slice = 300                // (jsb.get("slice") + "").toInt
        val deviceNumbers = "7,1,3"    // jsb.get("deviceNumbers") + ""
        val threshold = 2              // (jsb.get("threshold") + "").toInt
        val hasliferow = "0"           // fix: was the obfuscated `0+""`
        val fuzzyway = "0"             // fix: was the obfuscated `0+""`

        // If the end time is not after the begin time, open the window
        // up to Long.MaxValue (translated from the original Chinese note).
        val condition = FollowingSiteConditionEntity(
          idNumberType,
          idNumber,
          beginTime,
          if (endTime <= beginTime) Long.MaxValue else endTime,
          slice,
          deviceNumbers,
          hasliferow,
          fuzzyway,
          activityTimes)

        val properties = LoadESData.loadProperties()

        spark = SparkSession
          .builder()
          .appName(jobId)
          // .master("spark://10.68.2.240:6066")
          .master("local")
          .config("es.nodes", properties.getProperty("elasticsearch.host", "192.168.5.180"))
          .config("es.port", properties.getProperty("elasticsearch.port", "9200"))
          .config("es.mapping.date.rich", "false")
          .getOrCreate()
        spark.sparkContext.setLogLevel("ERROR")

        // new ClueService().following(spark, jobId, condition, threshold)
        new FollowService().followAll(spark, jobId, condition, threshold)
      } catch {
        // fix: catch only NonFatal (lets OOM/InterruptedException propagate)
        // and print the full stack trace instead of a possibly-null message.
        case NonFatal(e) => e.printStackTrace()
      } finally {
        // fix: the SparkSession was never stopped, leaking the local context.
        if (spark != null) spark.stop()
      }
    }
  }
}
