package title1

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD

/**
  * 问题1.计算出各个省的成交量总额（结果保存到MySQL中）
  */
/**
  * Question 1: compute the total transaction amount per province and save the
  * result to MySQL.
  *
  * Pipeline:
  *   1. Load the IP→province rule file, parse it into (startIp, endIp, province)
  *      tuples, collect it to the driver and broadcast it to the executors.
  *   2. For each order line, resolve the client IP to a province via the
  *      broadcast rules and emit (province, amount).
  *   3. reduceByKey to sum amounts per province, then write each partition
  *      to MySQL through IPUtils.data2Mysql.
  */
object provinceTotal {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName(this.getClass.getName)
    val sc = new SparkContext(conf)

    // FIX: the original only called sc.stop() on the happy path; any failure
    // while loading rules, running the job, or writing to MySQL leaked the
    // SparkContext. try/finally guarantees it is always stopped.
    try {
      // 1. Load the nationwide IP data and derive the matching rules.
      val ipRulesData: RDD[String] = sc.textFile("data/ip.txt")
      // Each rule line is '|'-separated.
      // NOTE(review): assumes fields 2/3 are the numeric IP range bounds and
      // field 6 is the province name — confirm against the ip.txt layout.
      val ipRulesRDD: RDD[(Long, Long, String)] = ipRulesData.map { t =>
        val rules: Array[String] = t.split("[|]")
        val start = rules(2).toLong
        val end = rules(3).toLong
        val province = rules(6)
        (start, end, province)
      }

      // Collect the (small) rule table to the driver and broadcast it, so each
      // executor holds one read-only copy instead of shipping it per task.
      val ipRules = ipRulesRDD.collect()
      val ipRuleBC: Broadcast[Array[(Long, Long, String)]] = sc.broadcast(ipRules)

      // 2. Process the order data.
      val orderData = sc.textFile("data/订单数据.log")
      val provinceAndAmount: RDD[(String, Int)] = orderData.map { t =>
        val line: Array[String] = t.split(" ")
        // 2.1 Client IP is the second space-separated field.
        val ip = line(1)
        val ipnum: Long = IPUtils.ip2Long(ip)
        // 2.2 Order amount is the last field.
        val amount = line(line.length - 1).toInt
        // 2.3 Read the broadcast rule table on the executor side.
        val ipRulesFromBC: Array[(Long, Long, String)] = ipRuleBC.value
        // 2.4 Look up the rule index for this IP; -1 means no rule matched.
        val index: Int = IPUtils.search(ipnum, ipRulesFromBC)
        if (index != -1) {
          val province = ipRulesFromBC(index)._3
          (province, amount)
        } else {
          // Unmatched IPs are bucketed under "unknown" with a zero amount,
          // i.e. their revenue is deliberately excluded from the totals.
          ("unknown", 0)
        }
      }

      // 3. Sum amounts per province.
      val result: RDD[(String, Int)] = provinceAndAmount.reduceByKey(_ + _)

      // Persist the result to MySQL; foreachPartition lets the helper reuse
      // one connection per partition (presumed — verify IPUtils.data2Mysql).
      result.foreachPartition { t =>
        IPUtils.data2Mysql(t, "provinceTotal")
      }
    } finally {
      sc.stop()
    }
  }
}
