
/*
* 数据统计类:
* 足球: (30s)
*   1). 球队赛季主客场胜率
*     例如: 足球-巴萨-2016/17-西甲-主场胜率/客场胜率
*   2). 球队赛季主客场进球/失球数
*   3). 球队赛季主客场积分
*
* */

//cp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar /data/caiqiu/prediction/jar/fb_FBST2017001.jar
//
//
//nohup ./bin/spark-submit --master spark://skn-pmukvrk0-spark-master:7077 --class caiqr.model.statistics.fb.FBST2017001  --jars /usr/local/spark/lib/spark-redis-0.1.1.jar,/usr/local/spark/lib/mysql-connector-java-5.1.35.jar,/usr/local/spark/lib/jedis-2.7.0.jar  --executor-memory 4G  /root/fb_FBST2017001.jar  save_db_info=172.16.4.17-prediction-caiqiu-Caiqiu502  maxResultSize=4g  job_id=8439 spark_id=11  big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_match_500w_3.csv  redis_server=172.16.0.67  output_home_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBST2017001_home.csv  output_away_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBST2017001_away.csv  redis_port=6379 > /data/caiqiu/log/a.log < /dev/null 2>&1  &
//
//
//sqoop export  --connect jdbc:mysql://172.16.4.17/prediction --username root --password Caiqiu502 --table FBST2017001 --update-mode allowinsert --update-key "team_id,season_id,type" --fields-terminated-by ','  -export-dir hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBST2017001_home.csv
//
//sqoop export  --connect jdbc:mysql://172.16.4.17/prediction --username root --password Caiqiu502 --table FBST2017001 --update-mode allowinsert --update-key "team_id,season_id,type" --fields-terminated-by ','  -export-dir hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBST2017001_away.csv




//caiqr.model.statistics.fb.FBST2017001

package caiqr.model.statistics.fb

import com.redislabs.provider.redis._
import caiqr.utils.PredictionUtils
import caiqr.utils.PredictionDBUtils
import caiqr.utils.AllFBMatchInputFile
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}
import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException
//import java.util.ArrayList

object FBST2017001 {

  /**
   * Entry point for the FBST2017001 statistics job.
   *
   * Args are "key=value" pairs:
   *   required: job_id, spark_id, big_file (HDFS path of the match CSV)
   *   optional: save_db_info, output_home_file, output_away_file,
   *             maxResultSize (defaults to "4g")
   *
   * Computes per-team per-season home and away win statistics and writes the
   * two result CSVs to HDFS, then marks the job as finished (awaiting the
   * sqoop import) in the prediction DB.
   *
   * @throws IllegalArgumentException when a required argument is missing
   */
  def main(args: Array[String]): Unit = {

    //////////////////////////////// argument parsing ////////////////////////////////
    // Parse "key=value" pairs into a Map.
    // BUG FIX: use split("=", 2) so values that themselves contain '='
    // (e.g. query strings in URLs) are not truncated at the second '='.
    val cmd_map = args.map { arg =>
      val items = arg.split("=", 2)
      (items(0), items(1))
    }.toMap

    val job_id = cmd_map.getOrElse("job_id", "")
    val model_id = cmd_map.getOrElse("spark_id", "")
    val big_file = cmd_map.getOrElse("big_file", "")
    val save_db_info = cmd_map.getOrElse("save_db_info", "") // DB connection info

    // Result output files (HDFS paths).
    val output_home_file = cmd_map.getOrElse("output_home_file", "")
    val output_away_file = cmd_map.getOrElse("output_away_file", "")

    if (job_id == "" || model_id == "" || big_file == "") {
      // BUG FIX: the original literal was missing the `s` interpolator, so the
      // exception message contained the text "${args.length}" verbatim.
      throw new IllegalArgumentException(s"Spark main args is error. ${args.length}")
    }

    // Max serialized result size per Spark action; defaults to 4g.
    val maxResultSize = cmd_map.getOrElse("maxResultSize", "4g")
    //////////////////////////////// argument parsing ////////////////////////////////

    // Spark environment.
    val conf = new SparkConf().setAppName("FBST2017001")
      .set("spark.driver.maxResultSize", maxResultSize)
    val sc = new SparkContext(conf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    // Load the match file from HDFS as a DataFrame.
    val match_df = AllFBMatchInputFile.load(sc, sqlContext, big_file)

    // 1. Per-team per-season win rates, split into home and away views.
    // NOTE(review): the original comment documented home_flag as 1=home/0=away,
    // but 2 is passed for away here. Any value != 1 selects the away branch,
    // and this value is emitted verbatim in the output rows, so it is kept
    // as-is to leave the downstream sqoop import unchanged.
    calculate_FBST2017001_team_season_win_data(sc, 1, match_df, output_home_file)
    calculate_FBST2017001_team_season_win_data(sc, 2, match_df, output_away_file)

    // Update job and spark status: spark run finished, awaiting sqoop import.
    PredictionDBUtils.update_job_spark_status(save_db_info, job_id, model_id)

    sc.stop()
  }

  /**
   * Computes per-team per-season win statistics for either the home or the
   * away side and saves one CSV line per (team, season) to `output_file`.
   *
   * Output columns (comma-separated):
   *   team_id, season_id, home_flag, results, result_cnt, win, draw, loss,
   *   match_score, home_score, away_score, ratio3, ratio1, ratio0, ratio31,
   *   ratio30, ratio10, goal, matchids
   *
   * @param sc        active SparkContext (not used directly; kept for interface
   *                  stability with existing callers)
   * @param home_flag 1 = home-side stats; any other value = away-side stats
   *                  (callers pass 2 for away); written verbatim to the output
   * @param match_df  match DataFrame; columns by position:
   *                  0 match_id, 1 season_id, 2 season_pre, 3 host_id,
   *                  4 away_id, 5 group_pre, 6 host_score, 7 away_score,
   *                  8 match_time, 9 backup, 10 recommend, 11 spf_cnt,
   *                  12 myear, 13 mmonth, 14 home_match_result,
   *                  15 away_match_result, 16 score
   * @param output_file HDFS path the result CSV is written to
   */
  def calculate_FBST2017001_team_season_win_data(sc: SparkContext, home_flag: Int, match_df: DataFrame, output_file: String): Unit = {

    // Parses "yyyy-MM-dd HH:mm:ss" match times into epoch milliseconds.
    // SimpleDateFormat is not thread-safe, but each Spark task deserializes
    // its own copy of the closure, so per-task reuse here is safe.
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

    // Step 1: key each match by "<team_id>_<season_id>" and carry
    //   ((match_result, match_time_ms), score, goals_for, goals_against, match_id)
    // e.g. (678_1978, ((3,1269198900000), 3, 3, 0, 285550))
    val tuple_same_init_odds_rdd = match_df.rdd.map { p =>
      val match_time_ms = sdf.parse(p.getString(8)).getTime

      if (home_flag == 1) {
        // Home view: key by host team; goals for = host_score, against = away_score.
        (s"${p.getString(3)}_${p.getString(1)}",
          ((p.getString(14), match_time_ms.toString), p.getString(16), p.getString(6).toInt, p.getString(7).toInt, p.getString(0)))
      } else {
        // Away view: key by away team; goals for/against swapped.
        (s"${p.getString(4)}_${p.getString(1)}",
          ((p.getString(15), match_time_ms.toString), p.getString(16), p.getString(7).toInt, p.getString(6).toInt, p.getString(0)))
      }
    }

    // Step 2: group by team+season and order each group chronologically so
    // the concatenated results string reflects match order.
    // NOTE(review): this compares millisecond timestamps as STRINGS, which is
    // only correct while all timestamps have the same number of digits
    // (13 digits covers 2001-09-09 through 2286) — TODO sort numerically.
    val new_tuple_same_init_odds_index_order_rdd = tuple_same_init_odds_rdd.groupByKey().map { p =>
      (p._1, p._2.toArray.sortWith(_._1._2 < _._1._2))
    }

    // Step 3: fold each sorted group into aggregate fields.
    // Groups produced by groupByKey are never empty, so reduce is safe here.
    // OUT: (1989_3202, ("33", "3_3", 6, 2, "471522_471530"))
    val new_tuple_same_init_odds_rdd = new_tuple_same_init_odds_index_order_rdd.map(p =>
      (p._1, (p._2.map(_._1._1).reduce(_ + _),   // results, one char per match, e.g. "310"
              p._2.map(_._2).reduce(_ + "_" + _), // per-match score strings
              p._2.map(_._3).sum,                 // goals for
              p._2.map(_._4).sum,                 // goals against
              p._2.map(_._5).reduce(_ + "_" + _)) // match id list
      )
    )

    // Step 4: split the key back into team/season; each result character is
    // one match, so the string length is the number of matches played.
    val same_init_odds_map_rdd = new_tuple_same_init_odds_rdd.map { p =>
      val keys = p._1.split("_")
      val result_cnt = p._2._1.length
      (keys(0), keys(1), p._2._1, p._2._2, p._2._3, p._2._4, p._2._5, result_cnt)
    }

    // Step 5: derive win/draw/loss counts, league points and percentage ratios.
    val result_rdd = same_init_odds_map_rdd.map { p =>
      val (team_id, season_id, results, goal, home_score, away_score, matchids, result_cnt) = p

      // Match-result encoding: '3' = win, '1' = draw, '0' = loss.
      val win = results.count(_ == '3')
      val draw = results.count(_ == '1')
      val loss = results.count(_ == '0')

      val match_score = win * 3 + draw // league points: 3 per win, 1 per draw

      // Percentages (rounded to whole numbers); result_cnt >= 1 by construction.
      val ratio3 = (win.toDouble / result_cnt * 100).round.toInt
      val ratio1 = (draw.toDouble / result_cnt * 100).round.toInt
      val ratio0 = (loss.toDouble / result_cnt * 100).round.toInt
      val ratio31 = ((win + draw).toDouble / result_cnt * 100).round.toInt
      val ratio30 = ((win + loss).toDouble / result_cnt * 100).round.toInt
      val ratio10 = ((draw + loss).toDouble / result_cnt * 100).round.toInt

      (team_id, season_id, home_flag, results, result_cnt, win, draw, loss, match_score,
        home_score, away_score, ratio3, ratio1, ratio0, ratio31, ratio30, ratio10, goal, matchids)
    }

    // Render each row as a comma-separated line and save to HDFS.
    // productIterator preserves the tuple's field order.
    val result_file_rdd = result_rdd.map(_.productIterator.mkString(","))

    // saveAsTextFile fails if the RDD is empty on some Hadoop versions; skip.
    if (!result_file_rdd.isEmpty()) {
      result_file_rdd.saveAsTextFile(output_file)
    }
  }

}





