
//cp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar /data/caiqiu/prediction/jar/fb_FBA2017008.jar
//
//scp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar root@172.16.0.71:/data/caiqiu/prediction/jar/fb_FBA2017008.jar
//
//
//nohup ./bin/spark-submit --master spark://skn-rqg382b1-spark-master:7077 --class caiqr.model.FBA2017008.FBA2017008 --jars /usr/local/spark/jars/mysql-connector-java-5.1.35.jar --executor-memory 6G --driver-memory 4G /data/caiqiu/prediction/jar/fb_FBA2017008.jar maxResultSize=6g min_match_result_cnt=6 save_db_info=172.16.4.17-prediction-caiqiu-Caiqiu502 big_file=hdfs://skn-qcqegnt5-hadoop-master:9000/data/caiqiu/csv/football_all_asia_500w_1.csv match_big_file=hdfs://skn-qcqegnt5-hadoop-master:9000/data/caiqiu/csv/football_match_reason_asia_500w_1.csv output_file=hdfs://skn-qcqegnt5-hadoop-master:9000/data/caiqiu/result/FBA2017008.csv job_id=8632 spark_id=11 > /data/log/prediction/a.log < /dev/null 2>&1 &

///data/caiqiu/prediction/bin/sqoop_export.sh 172.16.4.17 prediction caiqiu Caiqiu502 reason_season_hdfs type,season_pre,company_id,season_id,odds,match_id skn-qcqegnt5 /data/caiqiu/result/FBA2017008_1.csv


//caiqr.model.FBA2017008.FBA2017008

//赛事-初盘-终盘-所有赛季数据
//赛事-初盘-终盘-分赛季数据
//例如:
//    英超,season_id,company_id,0.5,0.5,数据匹配
//    英超,season_id,company_id,0.5,0.5,数据匹配

package caiqr.model.FBA2017008
import caiqr.utils.{AllAsiaInputFile, PredictionUtils, PredictionDBUtils}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}
import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException


object FBA2017008 {

  /**
   * Entry point. Expects "key=value" style command-line arguments.
   *
   * Required keys: match_big_file, job_id, spark_id, big_file, output_file.
   * Optional keys: maxResultSize (default "4g"),
   *                min_match_result_cnt (default "6"),
   *                save_db_info (forwarded to PredictionDBUtils).
   *
   * @throws IllegalArgumentException if fewer than 5 arguments are given or
   *                                  any required key is missing/empty.
   */
  def main(args: Array[String]): Unit = {

    // At least 5 "key=value" arguments are required:
    // match_big_file, job_id, spark_id, big_file, output_file
    if (args.length < 5) {
      // FIX: the original literal lacked the `s` interpolator, so the text
      // "${args.length}" was printed verbatim instead of the argument count.
      throw new IllegalArgumentException(s"Spark main args is error. ${args.length}")
    }

    //////////////////////////////// Parse arguments ////////////////////////////////
    // Turn each "key=value" argument into a map entry.
    // split with limit 2 keeps values that themselves contain '=' intact.
    val cmd_map = args.map { p =>
      val items = p.split("=", 2)
      (items(0), items(1))
    }.toMap

    // Read the individual parameters.
    val matchid_big_file = cmd_map.getOrElse("match_big_file", "") // odds data for the matches to be scored
    val job_id = cmd_map.getOrElse("job_id", "")
    val model_id = cmd_map.getOrElse("spark_id", "")
    val big_file = cmd_map.getOrElse("big_file", "")               // historical Asian-handicap odds file
    val output_file = cmd_map.getOrElse("output_file", "")         // HDFS path for the result CSV
    val maxResultSize = cmd_map.getOrElse("maxResultSize", "4g")
    val min_match_result_cnt = cmd_map.getOrElse("min_match_result_cnt", "6")
    val save_db_info = cmd_map.getOrElse("save_db_info", "")

    if (matchid_big_file == "" || job_id == "" || model_id == "" || big_file == "" || output_file == "") {
      // FIX: same missing-interpolator bug as above.
      throw new IllegalArgumentException(s"Spark main args is error. ${args.length}")
    }
    //////////////////////////////// Parse arguments ////////////////////////////////


    // 1. Spark environment.
    val sc = new SparkContext(new SparkConf()
      .setAppName("FBA2017008")
      .set("spark.driver.maxResultSize", maxResultSize)
    )
    val sqlContext = new SQLContext(sc)


    // 2. Load the historical Asian-handicap odds file, keeping matches from
    //    2012 onward.
    val src_asia_df = AllAsiaInputFile.load(sc, sqlContext, big_file)
    val asia_df = src_asia_df.
      where("match_time>'2012-01-01 00:00:00'").
      select("season_pre","company_id","match_time","init_odds","curr_odds", "home_match_result", "curr_home_result", "half_all_result", "goal", "sscore", "rqresult_diff", "season_id", "match_id").
      // FIX: chained .orderBy(a).orderBy(b) calls replace each other in Spark
      // (only the last one takes effect); a single multi-column orderBy gives
      // the intended company_id, match_time ordering.
      orderBy("company_id", "match_time")


    // 3. Load the matches whose odds need to be scored.
    val src_need_calculate_match_df = AllAsiaInputFile.load_match_file_rangqiu(sc, sqlContext, matchid_big_file)
    val need_calculate_match_df = src_need_calculate_match_df.
      select("season_pre","company_id","init_odds","curr_odds","rq_odds","season_id","match_id").
      orderBy("season_pre", "company_id") // FIX: was two chained orderBy calls (last one won)


    // 4. Aggregate per-season odds statistics and match them against the
    //    matches that need scoring; results are written to output_file.
    val odds_result_df = calculate_per_season_odds_FBA2017008(asia_df, sqlContext)
    calculate_match_season_odds_FBA2017008(odds_result_df, need_calculate_match_df, min_match_result_cnt.toInt, output_file)


    // 5. Mark the job as finished in the tracking database, then shut down.
    PredictionDBUtils.update_job_spark_status(save_db_info, job_id, model_id)
    sc.stop()
  }




  /**
   * Groups the historical Asian-handicap rows by
   * (season_pre, company_id, season_id, init_odds, curr_odds) and, for each
   * group, concatenates the per-match outcomes (ordered by match time,
   * newest first) into underscore-joined strings.
   *
   * @param asia_df    filtered/selected odds rows (column layout below)
   * @param sqlContext used to build the result DataFrame
   * @return DataFrame with columns: bseason_pre, bcompany_id, bseason_id,
   *         binit_odds, bcurr_odds, match_ids, result, half_all, goals,
   *         scores, rqresult_diff, result_size
   */
  def calculate_per_season_odds_FBA2017008(asia_df: DataFrame, sqlContext: SQLContext): DataFrame = {

    // Input row layout (by position):
    //   0 season_pre, 1 company_id, 2 match_time, 3 init_odds, 4 curr_odds,
    //   5 home_match_result, 6 curr_home_result, 7 half_all_result,
    //   8 goal, 9 sscore, 10 rqresult_diff, 11 season_id, 12 match_id
    //
    // NOTE(review): SimpleDateFormat is not thread-safe, but each Spark task
    // deserializes its own copy of this closure, so the shared instance is
    // assumed safe — confirm if the execution model ever changes.
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

    // Key:   "season_pre_company_id_season_id_init_odds_curr_odds"
    // Value: ((match_id, time), (SPF result, time), (half/full result, time),
    //         (total goals, time), (score, time), score diff)
    // Example:
    // (EuroCup_140_2194_-275_-150,((301925,1314982800000),(3,...),(33,...),(3,...),(21,...),1))
    // NOTE(review): the key is later split on '_', so the key fields
    // themselves must not contain underscores — verify for season_pre values.
    val tuple_same_init_odds_rdd = asia_df.rdd.map { p =>
      val match_time_second = sdf.parse(p.getString(2)).getTime
      (s"${p.getString(0)}_${p.getString(1)}_${p.getString(11)}_${p.getString(3)}_${p.getString(4)}",
        (
          (p.getString(12), match_time_second.toString()), // match_id
          (p.getString(5), match_time_second.toString()),  // SPF (win/draw/loss) result
          (p.getString(7), match_time_second.toString()),  // half/full-time result
          (p.getString(8), match_time_second.toString()),  // total goals
          (p.getString(9), match_time_second.toString()),  // score
          p.getString(10) // score difference, used later to derive the handicap (rangqiu) result
        )
      )
    }

    // Group by key and sort each group's entries by match time, newest first,
    // so the concatenated result strings below have a deterministic order.
    // NOTE: the sort compares millisecond timestamps as strings; this is
    // correct while all timestamps have the same number of digits.
    val new_tuple_same_init_odds_index_order_rdd = tuple_same_init_odds_rdd.groupByKey().map { p =>
      val sortArray = p._2.toArray.sortWith(_._1._2 > _._1._2)
      (p._1, sortArray)
    }

    // Reduce each group into joined strings, e.g.:
    // (EuroCup_9_2194_125_125,(302047_302015,00,00_00,2_3,02_12,-2_-1,ts1_ts2))
    // The SPF results are deliberately concatenated WITHOUT a separator: each
    // result is a single character, so the string length below equals the
    // number of matches in the group (result_size).
    val new_tuple_same_init_odds_rdd = new_tuple_same_init_odds_index_order_rdd.map(p =>
      (p._1, (
        p._2.map(p => p._1._1).reduce(_+"_"+_), // match ids
        p._2.map(p => p._2._1).reduce(_+_),     // SPF results (no separator, see note above)
        p._2.map(p => p._3._1).reduce(_+"_"+_), // half/full-time results
        p._2.map(p => p._4._1).reduce(_+"_"+_), // total goals
        p._2.map(p => p._5._1).reduce(_+"_"+_), // scores
        p._2.map(p => p._6).reduce(_+"_"+_),    // score differences
        p._2.map(p => p._1._2).reduce(_+"_"+_)  // match times
      ))
    )

    // Flatten every group into one CSV line, e.g.:
    // EuroCup,366,2194,-225,-175,302020_301930_301958,333,33_33_13,3_5_1,30_41_10,3_3_1,3
    val same_init_odds_map_rdd = new_tuple_same_init_odds_rdd.map { p =>
      val keys = p._1.split("_")
      val season_pre = keys(0)
      val company_id = keys(1)
      val season_id = keys(2)
      val init_odds = keys(3)
      val curr_odds = keys(4)
      // FIX: dropped the redundant .toString.toInt — ._2 is already a String
      // and .length is already an Int.
      val result_size = p._2._2.length

      s"${season_pre},${company_id},${season_id},${init_odds},${curr_odds},${p._2._1},${p._2._2},${p._2._3},${p._2._4},${p._2._5},${p._2._6},${result_size}"
    }

    // Wrap the CSV lines in a DataFrame and return it directly
    // (no HDFS round trip; previously took ~2.6 min).
    val schemaString = "bseason_pre,bcompany_id,bseason_id,binit_odds,bcurr_odds,match_ids,result,half_all,goals,scores,rqresult_diff,result_size"
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{StringType, StructField, StructType}
    val schema =
      StructType(
        schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))
    val rowRDD = same_init_odds_map_rdd.map(_.split(",")).map(p => Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11)))
    sqlContext.createDataFrame(rowRDD, schema)
  }




  /**
   * Joins the per-season aggregates against the matches to score, derives the
   * handicap (rangqiu) win/draw/loss outcomes from the historical score
   * differences, and saves one CSV line per matched group to output_file.
   *
   * @param result_df            output of calculate_per_season_odds_FBA2017008
   * @param match_df             matches to score (season_pre, company_id,
   *                             init_odds, curr_odds, rq_odds, season_id, match_id)
   * @param min_match_result_cnt minimum number of historical matches a group
   *                             must exceed to be reported
   * @param output_file          HDFS path for the result (skipped if empty result)
   */
  def calculate_match_season_odds_FBA2017008(result_df: DataFrame, match_df: DataFrame, min_match_result_cnt: Int, output_file: String): Unit = {

    // Join on competition, bookmaker and initial/current odds; only keep
    // groups with more than min_match_result_cnt historical matches.
    // (The season_id equality is intentionally disabled: matching is done
    // across all seasons.)
    val init_odds_df = result_df.join(match_df).
      where(result_df("bseason_pre") === match_df("season_pre")).
      where(result_df("bcompany_id") === match_df("company_id")).
      //where(result_df("bseason_id") === match_df("season_id")).
      where(result_df("binit_odds") === match_df("init_odds")).
      where(result_df("bcurr_odds") === match_df("curr_odds")).
      where(result_df("result_size") > min_match_result_cnt).
      select("season_pre","company_id","bseason_id","init_odds","curr_odds","match_ids","result","half_all","goals","scores","rqresult_diff","result_size","rq_odds","match_id").
      // FIX: was three chained orderBy calls, of which only the last applied.
      orderBy("season_pre", "company_id", "bseason_id")

    val result_rdd = init_odds_df.rdd.map { p =>

      val season_pre = p.getString(0)
      val company_id = p.getString(1)
      val season_id = p.getString(2)
      val init_odds = p.getString(3)
      val curr_odds = p.getString(4)
      val match_ids = p.getString(5)

      val spf_results = p.getString(6)   // win/draw/loss results
      val half_results = p.getString(7)  // half/full-time results
      val goal_results = p.getString(8)  // total goals
      val score_results = p.getString(9) // scores

      val rqresult_diff = p.getString(10) // underscore-joined score differences
      val result_cnt = p.getString(11)
      val rq_odds = p.getString(12).toInt // handicap line (lottery handicap is authoritative)
      val match_id = p.getString(13)

      // Derive the handicap win(3)/draw(1)/loss(0) outcome for each
      // historical match: compare the score difference against the negated
      // handicap line.
      val rq_results = rqresult_diff.split("_").map { q =>
        val odds = -rq_odds
        if (q.toInt > odds) {
          3
        } else if (q.toInt == odds) {
          1
        } else {
          0
        }
      }

      (season_pre,company_id,season_id,init_odds,curr_odds,match_id,result_cnt,
        spf_results,
        rq_odds,rq_results.mkString(""),
        half_results,
        goal_results,
        score_results,
        match_ids)
    }


    // Format each tuple as the shared result-file layout and save to HDFS
    // (fields common to the Asian/European odds company tables).
    val result_file_rdd = result_rdd.map {
      case (season_pre,company_id,season_id,init_odds,curr_odds,match_id,result_cnt,spf_results,rq_odds,rq_results,half_results,goal_results,score_results,match_ids) => {

        // message packs the per-match outcome strings, '|'-separated.
        val message = s"${spf_results}|${rq_odds}|${rq_results}|${half_results}|${goal_results}|${score_results}|${match_ids}"
        val odds = s"${init_odds}_${curr_odds}"

        // Output line format, e.g.:
        // FBA2017008,EPL,1055,3822,7,75|100|3000100|1|3000310|33_00_10_...|4_4_2_...|31_04_02_...|572978_572950_...
        Array("FBA2017008",season_pre,company_id,season_id,odds,match_id,result_cnt,message).mkString(",")
      }
    }

    // Spark refuses to write an empty RDD in some versions; skip in that case.
    if (!result_file_rdd.isEmpty()) {
      result_file_rdd.saveAsTextFile(output_file)
    }

  }




}
