// Football match big-data file loader (from HDFS)
// Source table: football_match
// NOTE(review): header says football, but the object name contains "BK" (basketball?)
// and one comment below mentioned basketball — confirm the actual domain.
package caiqr.utils

import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.rdd.RDD


object AllBKMatchInputFile {

  // Externally assigned Asia-handicap DataFrame; kept for binary compatibility.
  // NOTE(review): not read or written anywhere in this file — confirm callers before removing.
  var asia_df: DataFrame = null

  /**
   * Loads the match big-data file from HDFS and returns it as a DataFrame.
   *
   * Each input line is tab-separated with at least 13 raw columns
   * (match_id .. mmonth). Three derived string columns are appended:
   * home_match_result, away_match_result (league points: 3 win / 1 draw / 0 loss)
   * and score (total goals).
   *
   * @param sc         Spark context used to read the text file
   * @param sqlContext SQL context used to build the DataFrame
   * @param filename   HDFS path of the tab-separated match file
   * @return DataFrame with all columns typed as nullable StringType
   */
  def load(sc: SparkContext, sqlContext: SQLContext, filename: String): DataFrame = {
    val lines = sc.textFile(filename)
    val schemaString = "match_id,season_id,season_pre,host_id,away_id,group_pre,host_score,away_score,match_time,backup,recommend,myear,mmonth,home_match_result,away_match_result,score"
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{StringType, StructField, StructType}
    // Every column is kept as a nullable string; downstream code casts as needed.
    val schema = StructType(schemaString.split(",").map(name => StructField(name, StringType, nullable = true)))

    // Compute the derived result columns per row.
    val rowRDD = lines.map(_.split("\t")).map { p =>
      val hostScore = p(6).toInt
      val awayScore = p(7).toInt
      val totalGoals = hostScore + awayScore // total goals in the match

      // League points: 3 for a win, 1 for a draw, 0 for a loss.
      // BUG FIX: the original if/else had no draw branch, so a drawn match
      // (hostScore == awayScore) was scored as an away win (0 / 3).
      val (homeResult, awayResult) =
        if (hostScore > awayScore) (3, 0)
        else if (hostScore < awayScore) (0, 3)
        else (1, 1)

      Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11), p(12),
        homeResult.toString, awayResult.toString, totalGoals.toString)
    }
    sqlContext.createDataFrame(rowRDD, schema)
  }


}

