
/*
* Statistics model:
* Football (runs in ~20s):
*   Computes each team's win/draw/loss record over its most recent N matches.
* */

//
//cp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar /data/caiqiu/prediction/jar/fb_FBST2017002.jar
//
//nohup ./bin/spark-submit --master spark://skn-pmukvrk0-spark-master:7077 --class caiqr.model.statistics.fb.FBST2017002 --jars /usr/local/spark/lib/spark-redis-0.1.1.jar,/usr/local/spark/lib/mysql-connector-java-5.1.35.jar,/usr/local/spark/lib/jedis-2.7.0.jar --executor-memory 4G /root/fb_FBST2017002.jar save_db_info=172.16.4.17-prediction-caiqiu-Caiqiu502 maxResultSize=4g job_id=8439 spark_id=11 big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_match_500w_2.csv redis_server=172.16.0.67 output_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBST2017002.csv team_id_list="1146_1194_660" current=20 > /data/caiqiu/log/a.log < /dev/null 2>&1 &
//
//sqoop export  --connect jdbc:mysql://172.16.4.17/prediction --username root --password Caiqiu502 --table FBST2017002 --update-mode allowinsert --update-key "team_id" --fields-terminated-by ','  -export-dir hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBST2017002.csv
//


//caiqr.model.statistics.fb.FBST2017002

package caiqr.model.statistics.fb

import com.redislabs.provider.redis._
import caiqr.utils.PredictionUtils
import caiqr.utils.PredictionDBUtils
import caiqr.utils.AllFBMatchInputFile
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}
import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException

object FBST2017002 {

  /**
   * Entry point. Parses "key=value" command-line arguments, loads the football
   * match CSV from HDFS, joins it against the currently-on-sale team ids, and
   * writes each team's recent-N-match record back to HDFS, then flags the
   * job/spark status in the DB so sqoop can import the result.
   *
   * Required args: job_id, spark_id, big_file. Optional: save_db_info,
   * team_id_list, current (default "10"), output_file, maxResultSize
   * (default "4g").
   */
  def main(args: Array[String]) {

    //////////////////////////////// Parse arguments ////////////////////////////////
    // Each argument is "key=value". Split on the FIRST '=' only so values that
    // themselves contain '=' (e.g. URIs with query strings) survive intact.
    val cmd_map = args.toList.map { p =>
      val items = p.split("=", 2)
      (items(0), items(1))
    }.toMap

    val job_id = cmd_map.getOrElse("job_id", "")
    val model_id = cmd_map.getOrElse("spark_id", "")
    val big_file = cmd_map.getOrElse("big_file", "")
    val save_db_info = cmd_map.getOrElse("save_db_info", "") // DB connection info
    val team_id_list_str = cmd_map.getOrElse("team_id_list", "")
    val current_cnt = cmd_map.getOrElse("current", "10") // most recent N matches

    // Result output location (HDFS directory)
    val output_file = cmd_map.getOrElse("output_file", "")

    if (job_id == "" || model_id == "" || big_file == "") {
      // BUG FIX: the original message was a plain string literal, so
      // "${args.length}" was printed verbatim instead of the argument count.
      throw new IllegalArgumentException(s"Spark main args is error. ${args.length}")
    }

    // Driver-side max result size: default 4g
    val maxResultSize = cmd_map.getOrElse("maxResultSize", "4g")
    //////////////////////////////// Parse arguments ////////////////////////////////


    // Spark environment
    val conf = new SparkConf().setAppName("FBST2017002")
      .set("spark.driver.maxResultSize", maxResultSize)
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Load all match rows from HDFS as a DataFrame
    val match_df = AllFBMatchInputFile.load(sc, sqlContext, big_file)

    // DataFrame of the team ids currently on sale (parsed from team_id_list_str)
    val team_id_list_df = PredictionUtils.transation_team_id_list_to_df(team_id_list_str, sc)

    // Every match in which one of the requested teams appears, either as the
    // host or the away side, most recent first.
    val same_team_match_df = match_df.join(team_id_list_df,
      (match_df("host_id") === team_id_list_df("sporttery_team_id") || match_df("away_id") === team_id_list_df("sporttery_team_id"))).
      orderBy(match_df("match_time").desc).
      selectExpr("sporttery_team_id", "match_id",
        "host_id", "away_id", "match_time", "host_score", "away_score",
        "home_match_result", "away_match_result", "score")

    // 1. Each team's record over its most recent N matches
    calculate_FBST2017002_team_season_win_data(sc, same_team_match_df, output_file, current_cnt.toInt)

    // Update job and spark status: spark finished, waiting for sqoop to
    // import the result into the DB.
    PredictionDBUtils.update_job_spark_status(save_db_info, job_id, model_id)

    sc.stop()
  }


  /**
   * Computes each team's record over its most recent `current_cnt` matches
   * and saves one CSV line per team to `output_file` on HDFS.
   *
   * Expected DataFrame columns (all strings), in order:
   *   sporttery_team_id, match_id, host_id, away_id, match_time,
   *   host_score, away_score, home_match_result, away_match_result, score
   *
   * Output columns (comma separated):
   *   team_id, results, result_cnt, win, draw, loss, match_score,
   *   goals_for, goals_against, ratio3, ratio1, ratio0, ratio31, ratio30,
   *   ratio10, goal, matchids
   */
  def calculate_FBST2017002_team_season_win_data(sc: SparkContext, same_team_match_df: DataFrame, output_file: String, current_cnt: Int): Unit = {

    // NOTE: SimpleDateFormat is not thread-safe, but the closure is serialized
    // per task, so each executor task deserializes its own private copy.
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

    // Re-key each row from the perspective of the requested team:
    // (team_id, ((match_result, match_time_millis), total_goals, goals_for, goals_against, match_id))
    // e.g. (1194, ((3,1489240800000), 5, 4, 1, 574647))
    val host_away_team_rdd = same_team_match_df.rdd.map { p =>
      val src_team_id = p.getString(0)
      val match_id = p.getString(1)
      val host_id = p.getString(2)
      val away_id = p.getString(3)

      val match_time_second = sdf.parse(p.getString(4)).getTime

      val host_score = p.getString(5).toInt
      val away_score = p.getString(6).toInt
      val home_match_result = p.getString(7)
      val away_match_result = p.getString(8)
      val score = p.getString(9)

      // Pick the result and goals-for/against from the team's own side.
      if (src_team_id == host_id) {
        (src_team_id, ((home_match_result, match_time_second), score, host_score, away_score, match_id))
      } else {
        (src_team_id, ((away_match_result, match_time_second), score, away_score, host_score, match_id))
      }
    }

    // Per team, keep only the most recent `current_cnt` matches
    // (sorted by match time, newest first).
    val recent_matches_rdd = host_away_team_rdd.groupByKey().map { p =>
      val sorted = p._2.toArray.sortWith(_._1._2 > _._1._2).slice(0, current_cnt)
      (p._1, sorted)
    }

    // Aggregate each team's recent matches into flat summary fields, e.g.
    // (1194, ("30303...", "5_4_4_...", 27, 22, "574608_574597_...")):
    //   _1 concatenated result chars ('3' win / '1' draw / '0' loss)
    //   _2 "_"-joined total-goal strings
    //   _3 summed goals for,  _4 summed goals against
    //   _5 "_"-joined match ids
    val aggregated_rdd = recent_matches_rdd.map(p =>
      (p._1, (p._2.map(_._1._1).mkString(""), // results
              p._2.map(_._2).mkString("_"),   // total goals per match
              p._2.map(_._3).sum,             // goals for
              p._2.map(_._4).sum,             // goals against
              p._2.map(_._5).mkString("_")))  // match ids
    )

    // Final per-team summary line:
    // (team_id, results, result_cnt, win, draw, loss, match_score, goals_for,
    //  goals_against, ratio3, ratio1, ratio0, ratio31, ratio30, ratio10, goal, matchids)
    val result_rdd = aggregated_rdd.map { p =>
      val team_id = p._1
      val results = p._2._1
      val goal = p._2._2
      val goals_for = p._2._3
      val goals_against = p._2._4
      val matchids = p._2._5
      // One character per match result, so the string length is the match count.
      val result_cnt = results.length

      // Win / draw / loss counts
      val win = results.count(_ == '3')
      val draw = results.count(_ == '1')
      val loss = results.count(_ == '0')

      val match_score = win * 3 + draw // league points: 3 per win, 1 per draw

      // Percentages, rounded to the nearest integer. result_cnt > 0 is
      // guaranteed because every groupByKey group has at least one match.
      val ratio3 = (win.toDouble / result_cnt * 100).round.toInt
      val ratio1 = (draw.toDouble / result_cnt * 100).round.toInt
      val ratio0 = (loss.toDouble / result_cnt * 100).round.toInt
      val ratio31 = ((win + draw).toDouble / result_cnt * 100).round.toInt
      val ratio30 = ((win + loss).toDouble / result_cnt * 100).round.toInt
      val ratio10 = ((draw + loss).toDouble / result_cnt * 100).round.toInt

      (team_id, results, result_cnt, win, draw, loss, match_score, goals_for, goals_against, ratio3, ratio1, ratio0, ratio31, ratio30, ratio10, goal, matchids)
    }

    // Serialize each tuple as a CSV line and persist to HDFS.
    // productIterator.mkString(",") produces the same text as joining each
    // field's toString with commas.
    val result_file_rdd = result_rdd.map(_.productIterator.mkString(","))

    // saveAsTextFile fails on an already-existing path and an empty RDD is
    // not worth writing, hence the guard.
    if (!result_file_rdd.isEmpty()) {
      result_file_rdd.saveAsTextFile(output_file)
    }

  }


}





