


//
//cp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar /data/caiqiu/prediction/jar/fb_RFB2017001.jar
//
//scp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar root@172.16.0.71:/data/caiqiu/prediction/jar/fb_RFB2017001.jar
//
//
//nohup ./bin/spark-submit --master spark://skn-pmukvrk0-spark-master:7077 --class caiqr.reason.RFB2017001 --jars /usr/local/spark/lib/mysql-connector-java-5.1.35.jar --executor-memory 6G --driver-memory 4G /data/caiqiu/prediction/jar/fb_RFB2017001.jar min_match_result_cnt=5 maxResultSize=6g big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_all_asia_500w_1.csv match_big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_match_reason_asia_500w_1.csv save_db_info=172.16.4.17-prediction-caiqiu-Caiqiu502 output_home_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/RFB2017001_home.csv output_away_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/RFB2017001_away.csv job_id=8632 spark_id=11 > /data/log/prediction/a.log < /dev/null 2>&1 &
//
//
///data/caiqiu/prediction/bin/sqoop_export.sh 172.16.4.17 prediction caiqiu Caiqiu502 reason_team_hdfs type,match_id,team_id,company_id skn-wf2zlrwn /data/caiqiu/result/RFB2017001*.csv



//caiqr.reason.RFB2017001

//
//RFB2017001
//1. 主客队对阵相似球队战绩统计(竞彩玩法统计)
//  2. 公司+亚盘初盘口+终盘口
//3. 实例: 巴萨vs皇马, 半球, 半球
//  1). 巴萨,主场,bet365, 半球-半球,
//  2). 皇马,客场,bet365, 半球-半球,
//
//* 主队/客场让半球(盘口),胜率超过70%,其中让胜比例65%,让平比例35%
//  主队/客场让半球(盘口),最多比分: 2:1(13.5%), 1:1(13.5%), 0:1(13.5%), 2:1(13.5%)
//  主队/客场让半球(盘口),半全场比例排行: 平胜(13.5%), 平平(13.5%), 胜负(13.5%)
//  主队/客场让半球(盘口),总进球比例排行: 3(13.5%), 1(13.5%), 0(13.5%)


package caiqr.reason
import caiqr.utils.AllAsiaInputFile
import caiqr.utils.PredictionUtils
import caiqr.utils.PredictionDBUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}


import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException

/**
 * RFB2017001 — Asian-handicap "similar team record" statistics job.
 *
 * For every (team, bookmaker, initial handicap, closing handicap) combination seen
 * in the historical odds file, aggregates the historical results (win/draw/lose,
 * half/full-time, total goals, score, goal difference), then matches the matches
 * to be predicted against those aggregates and writes one CSV prediction line per
 * (match, team, bookmaker) to HDFS. Finally marks the job as done in the DB so
 * sqoop can export the result files.
 */
object RFB2017001 {

  /**
   * Entry point. Arguments are `key=value` pairs, e.g.
   * `big_file=hdfs://... match_big_file=hdfs://... job_id=8632 spark_id=11`.
   */
  def main(args: Array[String]): Unit = {

    //////////////////////////////// Parse arguments ////////////////////////////////
    // Turn "key=value" arguments into a Map. split("=", 2) keeps any '='
    // characters inside the value (e.g. in URLs) intact.
    val cmd_map = args.map { arg =>
      val parts = arg.split("=", 2)
      (parts(0), parts(1))
    }.toMap

    val big_file             = cmd_map.getOrElse("big_file", "")              // HDFS path: full Asian-handicap odds history
    val matchid_big_file     = cmd_map.getOrElse("match_big_file", "")        // HDFS path: odds of the matches to predict
    val save_db_info         = cmd_map.getOrElse("save_db_info", "")          // DB connection info for the status update
    val maxResultSize        = cmd_map.getOrElse("maxResultSize", "4g")       // spark.driver.maxResultSize
    val min_match_result_cnt = cmd_map.getOrElse("min_match_result_cnt", "3") // minimum historical sample size per group
    val job_id               = cmd_map.getOrElse("job_id", "")
    val model_id             = cmd_map.getOrElse("spark_id", "")
    val output_home_file     = cmd_map.getOrElse("output_home_file", "")      // HDFS output for home-team predictions
    val output_away_file     = cmd_map.getOrElse("output_away_file", "")      // HDFS output for away-team predictions

    // 1. Spark environment.
    val sc = new SparkContext(new SparkConf()
      .setAppName("RFB2017001")
      .set("spark.driver.maxResultSize", maxResultSize)
    )
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    // 2. Load the historical Asian-handicap odds file from HDFS as a DataFrame,
    //    ordered by match, bookmaker and match time.
    val src_asia_df = AllAsiaInputFile.load(sc, sqlContext, big_file)
    val asia_df = src_asia_df.
      orderBy(src_asia_df("match_id").asc, src_asia_df("company_id").asc, src_asia_df("match_time").asc)
      .select("match_id","company_id","match_time","init_odds","curr_odds","host_id","away_id", "home_match_result", "curr_home_result", "half_all_result", "goal", "sscore", "rqresult_diff")

    // 3. Load the odds of the matches that need a prediction.
    val src_need_calculate_match_df = AllAsiaInputFile.load_match_file_rangqiu(sc, sqlContext, matchid_big_file)
    val need_calculate_match_df = src_need_calculate_match_df.
      orderBy(src_need_calculate_match_df("match_id").asc, src_need_calculate_match_df("company_id").asc, src_need_calculate_match_df("match_time").asc).
      select("match_id","company_id","init_odds","curr_odds","host_id","away_id","rq_odds")

    // 4. Compute and save, once for the home side and once for the away side.
    val home_result_df = calculate_odds_data_RFB2017001(asia_df, sqlContext, "home")
    calculate_match_data_RFB2017001(home_result_df, need_calculate_match_df, min_match_result_cnt.toInt, "home", output_home_file)

    val away_result_df = calculate_odds_data_RFB2017001(asia_df, sqlContext, "away")
    calculate_match_data_RFB2017001(away_result_df, need_calculate_match_df, min_match_result_cnt.toInt, "away", output_away_file)

    // 5. Mark the Spark stage as finished; the result files now await sqoop import.
    PredictionDBUtils.update_job_spark_status(save_db_info, job_id, model_id)

    sc.stop()
  }

  /**
   * Aggregate historical results per (team, bookmaker, initial odds, closing odds).
   *
   * @param asia_df    historical odds DataFrame (column layout documented below)
   * @param sqlContext SQL context used to build the result DataFrame
   * @param home_type  "home" to key on the host team, anything else for the away team
   * @return DataFrame with columns: bteam_id, bcompany_id, binit_odds, bcurr_odds,
   *         match_ids, result, half_all, goals, scores, rqresult_diff, result_size
   */
  def calculate_odds_data_RFB2017001(asia_df: DataFrame, sqlContext: SQLContext, home_type: String): DataFrame = {
    // SimpleDateFormat is not thread-safe, but each Spark task deserializes its
    // own copy of the closure, so per-task use is safe here.
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

    // Row column layout of asia_df:
    //  0 match_id, 1 company_id, 2 match_time, 3 init_odds, 4 curr_odds,
    //  5 host_id, 6 away_id, 7 home_match_result, 8 curr_home_result,
    //  9 half_all_result, 10 goal, 11 sscore, 12 rqresult_diff
    // The only difference between the two sides is which team id keys the group.
    val teamIdx = if (home_type == "home") 5 else 6

    // Key: "teamId_companyId_initOdds_currOdds".
    // Value: each result field paired with the match-time epoch millis (as a
    // string) so the group can later be ordered chronologically.
    val keyed_rdd = asia_df.rdd.map { row =>
      val ts = sdf.parse(row.getString(2)).getTime.toString
      (s"${row.getString(teamIdx)}_${row.getString(1)}_${row.getString(3)}_${row.getString(4)}",
        (
          (row.getString(0), ts),  // match_id
          (row.getString(7), ts),  // win/draw/lose result
          (row.getString(9), ts),  // half-time/full-time result
          (row.getString(10), ts), // total goals
          (row.getString(11), ts), // final score
          row.getString(12)        // goal difference, later used for the handicap result
        ))
    }

    // Group by key and sort each group by match time. NOTE(review): this is a
    // lexicographic comparison of epoch-millis strings; it is correct as long as
    // all timestamps have the same digit count (true for contemporary dates).
    val grouped_sorted_rdd = keyed_rdd.groupByKey().map { case (key, values) =>
      (key, values.toArray.sortWith(_._1._2 < _._1._2))
    }

    // Fold each sorted group into "_"-joined strings (the win/draw/lose results
    // are one character each and are deliberately concatenated WITHOUT a
    // separator so their length equals the sample count). groupByKey guarantees
    // non-empty groups, so reduce is safe.
    // Example: (3518_1298_-25_-25,(568566_568700,03,00_33,1_3,01_21,-1_1,...))
    val aggregated_rdd = grouped_sorted_rdd.map { case (key, rows) =>
      (key, (
        rows.map(_._1._1).reduce(_ + "_" + _), // match ids
        rows.map(_._2._1).reduce(_ + _),       // results, no separator
        rows.map(_._3._1).reduce(_ + "_" + _), // half/full-time results
        rows.map(_._4._1).reduce(_ + "_" + _), // total goals
        rows.map(_._5._1).reduce(_ + "_" + _), // scores
        rows.map(_._6).reduce(_ + "_" + _),    // goal differences
        rows.map(_._1._2).reduce(_ + "_" + _)  // match times (currently unused downstream)
      ))
    }

    // Flatten to one CSV line per group.
    // Example: 1255,1189,-100,-75,517667_521907_575828,113,31_11_13,2_4_3,11_22_21,0_0_1,3
    val csv_rdd = aggregated_rdd.map { case (key, agg) =>
      val keys = key.split("_")
      // One character per historical match, so the string length IS the sample count.
      val result_size = agg._2.length
      s"${keys(0)},${keys(1)},${keys(2)},${keys(3)},${agg._1},${agg._2},${agg._3},${agg._4},${agg._5},${agg._6},${result_size}"
    }

    // Build and return the result DataFrame (all columns kept as strings).
    val schemaString = "bteam_id,bcompany_id,binit_odds,bcurr_odds,match_ids,result,half_all,goals,scores,rqresult_diff,result_size"
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{StringType, StructField, StructType}
    val schema =
      StructType(
        schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))
    val rowRDD = csv_rdd.map(_.split(",")).map(f => Row(f(0), f(1), f(2), f(3), f(4), f(5), f(6), f(7), f(8), f(9), f(10)))
    sqlContext.createDataFrame(rowRDD, schema)
  }

  /**
   * Match the to-be-predicted matches against the historical aggregates and
   * save one CSV prediction line per (match, team, bookmaker) to HDFS.
   *
   * @param result_df            aggregates from calculate_odds_data_RFB2017001
   * @param match_df             odds of the matches that need predictions
   * @param min_match_result_cnt groups with fewer historical samples are discarded
   * @param team_type            "home" joins on host_id, anything else on away_id
   * @param output_file          HDFS directory for the result text file
   */
  def calculate_match_data_RFB2017001(result_df: DataFrame, match_df: DataFrame, min_match_result_cnt: Int, team_type: String, output_file: String): Unit = {

    // Which team column of match_df the aggregates are joined on.
    val host_id = if (team_type == "home") "host_id" else "away_id"

    // Equi-join on team, bookmaker, initial and closing odds, keeping only groups
    // with a large enough historical sample.
    val init_odds_df = result_df.join(match_df).
      where(result_df("bteam_id") === match_df(host_id)).
      where(result_df("bcompany_id") === match_df("company_id")).
      where(result_df("binit_odds") === match_df("init_odds")).
      where(result_df("bcurr_odds") === match_df("curr_odds")).
      where(result_df("result_size") > min_match_result_cnt).
      orderBy(match_df(host_id).asc, match_df("company_id").asc).
      select(host_id,"company_id","init_odds","curr_odds","match_ids","result","half_all","goals","scores","rqresult_diff","result_size","rq_odds","match_id")

    val result_rdd = init_odds_df.rdd.map { row =>

      val team_id    = row.getString(0)
      val company_id = row.getString(1)
      val init_odds  = row.getString(2)
      val curr_odds  = row.getString(3)
      val match_ids  = row.getString(4)

      val spf_results   = row.getString(5) // win/draw/lose results
      val half_results  = row.getString(6) // half/full-time results
      val goal_results  = row.getString(7) // total goals
      val score_results = row.getString(8) // scores

      val rqresult_diff = row.getString(9)          // "_"-joined goal differences
      val result_cnt    = row.getString(10)
      val rq_odds       = row.getString(11).toInt   // lottery (竞彩) handicap
      val match_id      = row.getString(12)

      // Derive the handicap-adjusted result for every historical match:
      // 3 = win after handicap, 1 = draw (push), 0 = loss.
      val handicap = -rq_odds // hoisted: same for every element
      val rq_results = rqresult_diff.split("_").map { diff =>
        val d = diff.toInt
        if (d > handicap) 3
        else if (d == handicap) 1
        else 0
      }

      (match_id, team_id, company_id, init_odds, curr_odds, result_cnt,
        spf_results,
        rq_odds, rq_results.mkString(""),
        half_results,
        goal_results,
        score_results,
        match_ids)
    }

    // Flatten to the generic company-table CSV format:
    //   type, match_id, team_id, company_id, cnt, message
    // Example:
    //   RFB2017001,654625,2257,537,6,-125|-125|333030|-1|333010|33_33_33_00_13_00|5_2_6_3_3_1|41_20_42_12_21_01|531640_...
    val result_file_rdd = result_rdd.map {
      case (match_id, team_id, company_id, init_odds, curr_odds, result_cnt,
            spf_results, rq_odds, rq_results, half_results, goal_results, score_results, match_ids) =>
        val message = s"${init_odds}|${curr_odds}|${spf_results}|${rq_odds}|${rq_results}|${half_results}|${goal_results}|${score_results}|${match_ids}"
        Array("RFB2017001", match_id, team_id, company_id, result_cnt, message).mkString(",")
    }

    // Only write when there is something to write (saveAsTextFile would still
    // create the output directory for an empty RDD).
    if (!result_file_rdd.isEmpty()) {
      result_file_rdd.saveAsTextFile(output_file)
    }

  }

}


