// Football — loads match big-data files from HDFS
// football_match
package caiqr.utils

import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.rdd.RDD


/**
 * Loaders for football-match "big data" TSV files stored on HDFS.
 *
 * Each loader parses a tab-separated file, derives match-result columns
 * (win/draw/loss, handicap result, total goals, half/full-time, exact score)
 * and returns the rows as a string-typed `DataFrame`.
 */
object AllFBMatchInputFile {

  // NOTE(review): never assigned anywhere in this file — presumably populated
  // by an external caller. Kept (including the null default) for backward
  // compatibility; confirm whether it is still used before removing.
  var asia_df: DataFrame = null

  /**
   * Win/draw/loss points from the perspective of the first side:
   * 3 = win, 1 = draw, 0 = loss.
   *
   * The away-side result is obtained by swapping the arguments.
   */
  private def resultPoints(ownScore: Int, opponentScore: Int): Int =
    if (ownScore > opponentScore) 3
    else if (ownScore == opponentScore) 1
    else 0

  /**
   * Loads the basic match file and returns it as a DataFrame.
   *
   * Input is tab-separated with 14 raw columns (p(0)..p(13)); three derived
   * columns are appended: home result, away result and total goals.
   * All columns are nullable strings.
   *
   * @param sc         Spark context used to read the text file
   * @param sqlContext SQL context used to build the DataFrame
   * @param filename   HDFS path (or any path `sc.textFile` accepts)
   */
  def load(sc: SparkContext, sqlContext: SQLContext, filename: String): DataFrame = {
    val lines = sc.textFile(filename)
    val schemaString = "match_id,season_id,season_pre,host_id,away_id,group_pre,host_score,away_score,match_time,backup,recommend,spf_cnt,myear,mmonth,home_match_result,away_match_result,score"
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{StringType, StructField, StructType}
    val schema = StructType(schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))

    // Derive match results from the raw score columns.
    val rowRDD = lines.map(_.split("\t")).map { p =>
      // NOTE(review): p(6)/p(7) are assumed to always be valid integers;
      // a malformed line will fail the job with NumberFormatException.
      val hostScore = p(6).toInt
      val awayScore = p(7).toInt

      val totalGoals = hostScore + awayScore                       // total goals scored
      val homeResult = resultPoints(hostScore, awayScore)          // home win/draw/loss (3/1/0)
      val awayResult = resultPoints(awayScore, hostScore)          // away win/draw/loss (3/1/0)

      Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11), p(12), p(13),
        homeResult.toString, awayResult.toString, totalGoals.toString)
    }
    sqlContext.createDataFrame(rowRDD, schema)
  }

  /**
   * Loads the handicap ("rang qiu") match file covering every lottery play
   * type, appending five derived result columns:
   *
   *  1. `result`          — full-time win/draw/loss (3/1/0)
   *  2. `rq_result`       — win/draw/loss after applying the handicap
   *  3. `goal`            — total goals, capped at 7 (7 means "7+")
   *  4. `half_all_result` — half-time result concatenated with full-time result
   *  5. `new_result`      — exact score string, bucketed to "90"/"99"/"09"
   *                         for lopsided or high-scoring games
   *
   * @param sc         Spark context used to read the text file
   * @param sqlContext SQL context used to build the DataFrame
   * @param filename   HDFS path (or any path `sc.textFile` accepts)
   */
  def load_rq_match_file(sc: SparkContext, sqlContext: SQLContext, filename: String): DataFrame = {
    val lines = sc.textFile(filename)
    val schemaString = "match_id,match_time,season_id,match_desc,season_pre,group_pre,host_id,away_id,company_odds,host_half_score,away_half_score,host_score,away_score,result,rq_result,goal,half_all_result,new_result"
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{StringType, StructField, StructType}
    val schema = StructType(schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))

    // Derive all lottery-play results from the raw score columns.
    val rowRDD = lines.map(_.split("\t")).map { p =>
      val handicap = p(8).toInt      // goal handicap applied to the home side
      val hostHalf = p(9).toInt
      val awayHalf = p(10).toInt
      val hostScore = p(11).toInt
      val awayScore = p(12).toInt

      // 1. Full-time win/draw/loss.
      val result = resultPoints(hostScore, awayScore)

      // 2. Win/draw/loss after adding the handicap to the home score.
      val rqResult = resultPoints(hostScore + handicap, awayScore)

      // 3. Total goals, capped at 7 ("7 or more" bucket).
      val goal = math.min(hostScore + awayScore, 7)

      // 4. Half-time result concatenated with full-time result, e.g. "13".
      val halfResult = resultPoints(hostHalf, awayHalf)
      val halfAllResult = s"${halfResult}${result}"

      // 5. Exact score as "<host><away>", collapsed into overflow buckets:
      //    "90" = big home win, "99" = high-scoring draw, "09" = big away win.
      val score = s"${hostScore}${awayScore}"
      val newResult =
        if (hostScore > awayScore) {
          if (hostScore > 5 || awayScore > 2) "90" else score
        } else if (hostScore == awayScore) {
          if (hostScore > 3) "99" else score
        } else {
          if (hostScore > 2 || awayScore > 5) "09" else score
        }

      Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11), p(12),
        result.toString, rqResult.toString, goal.toString, halfAllResult, newResult
      )
    }
    sqlContext.createDataFrame(rowRDD, schema)
  }

}

