package advertising.km

import advertising.handler.{FieldValueHandler, LBSHandler}

import org.apache.commons.lang3.StringUtils
import org.apache.http.impl.client.{CloseableHttpClient, HttpClients}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

//  .--,       .--,
// ( (  \.---./  ) )
//  '.__/o   o\__.'
//     {=  ^  =}
//      >  -  <
//     /       \
//    //       \\
//   //|   .   |\\
//   "'\       /'"_.-~^`'-.
//      \  _  /--'         `
//    ___)( )(___
//   (((__) (__)))    高山仰止, 景行行止; 虽不能至, 心向往之.
//                    "Like a high mountain, to be looked up to; like a great road, to be travelled.
//                     Though I cannot reach it, my heart yearns for it." — Book of Odes

/**
  * Builds the business-district ("商圈") knowledge base.
  *
  * Pipeline: read raw log lines from the path given in `args(0)`, split each
  * line on commas and pull latitude (column 22) and longitude (column 21),
  * keep only coordinates inside China's bounding box, then for every surviving
  * point look up the business district via the Baidu LBS API and persist it to
  * the DB keyed by an 8-character geohash.
  */
object BusinessKnowledgeManager {

  def main(args: Array[String]): Unit = {

    if (args.length < 1) {
      // "No data input path configured" — abort before touching Spark.
      System.err.println("未配置数据输入路径")
      System.exit(1)
    }

    // NOTE(review): the raw data differs from the documentation;
    // this path is expected to point at row.log.
    val inputLog = args(0)

    val conf = new SparkConf()
      .setAppName("BusinessKnowledgeManager")
    val spark = SparkSession.builder().config(conf).getOrCreate()
//    spark.sparkContext.setLogLevel("WARN")

    // The data does not match the docs; only the latitude/longitude columns
    // of the raw records are usable here.
    val rawData = spark.read.textFile(inputLog)

    import spark.implicits._
    rawData.map(row => {
      val arr = StringUtils.split(row, ",")
      // (latitude, longitude); malformed numbers fall back to 0 so the
      // bounding-box filter below drops them.
      import advertising.boost.SuperNumber._
      (arr(22).toDoubleOrElse0(), arr(21).toDoubleOrElse0())
    }).filter(tuple => {
      val latitude = tuple._1
      val longitude = tuple._2
      // Restrict coordinates to China's approximate bounding box.
      longitude >= 73 && longitude <= 135 && latitude >= 3 && latitude <= 53
    }).foreachPartition(iterator => {
      // One HTTP client per partition; it MUST be closed, otherwise each
      // processed partition leaks a connection pool on long-running executors.
      val httpClient: CloseableHttpClient = HttpClients.createDefault()
      try {
        iterator.foreach { tuple =>
          val latitude = tuple._1
          val longitude = tuple._2
          // Business-district info via Baidu reverse geocoding.
          val business = LBSHandler.getBusinessFromBaidu(latitude, longitude, httpClient)
          if (business != null) {
            // 8-character geohash used as the DB key.
            val geoHash = LBSHandler.getGeoHash(latitude, longitude)
            LBSHandler.save2DB(geoHash, business)
          }
        }
      } finally {
        // Release pooled connections even if the Baidu lookup or DB write throws.
        httpClient.close()
      }
    })
    spark.stop()
  }

}
