

//cp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar /data/caiqiu/prediction/jar/fb_FBA2017007.jar
//
//scp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar root@172.16.0.83:/data/caiqiu/console/jar/fb_FBA2017007.jar
//
//
//nohup ./bin/spark-submit --master spark://skn-pmukvrk0-spark-master:7077 --class caiqr.model.FBA2017007.FBA2017007 --jars /usr/local/spark/lib/mysql-connector-java-5.1.35.jar --executor-memory 6G --driver-memory 4G /data/caiqiu/console/jar/fb_FBA2017007.jar min_match_result_cnt=30 maxResultSize=6g  big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_all_asia_500w_1.csv  match_big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_match_all_rq_asia_500w_1.csv  save_db_info=172.16.4.17-prediction-caiqiu-Caiqiu502  output_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBA2017007.csv  job_id=8632 spark_id=11  > /data/caiqiu/log/a.log < /dev/null 2>&1  &
//
//sqoop export  --connect jdbc:mysql://172.16.4.17/prediction --username root --password Caiqiu502 --table FBA2017007 --update-mode allowinsert --update-key "match_id,company_id,init_home,init_odds,init_away,curr_home,curr_odds,curr_away" --fields-terminated-by ','  -export-dir hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/result/FBA2017007.csv



//
//caiqr.model.FBA2017007.FBA2017007

//FBA2017007 (runs ~1.7 min)
//1. For the specified matches, use the initial handicap line (incl. water level) + live handicap line (incl. water level)
//2. Compute the handicap win/draw/loss result (using the Jingcai handicap line as the reference)

package caiqr.model.FBA2017007
import caiqr.utils.PredictionUtils
import caiqr.model.fb_asia_water.AsiaWaterUtils
import caiqr.utils.AllAsiaInputFile
import caiqr.utils.PredictionDBUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}
import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException

object FBA2017007 {

  def main(args: Array[String]): Unit = {

    //////////////////////////////// Parse arguments ////////////////////////////////
    // Turn "key=value" command-line arguments into a Map.
    // split("=", 2) keeps values that themselves contain '=' intact, and a
    // malformed argument (no '=') is skipped instead of throwing
    // ArrayIndexOutOfBoundsException as the previous split(...)(1) did.
    val cmd_map: Map[String, String] = args.toList.flatMap { arg =>
      arg.split("=", 2) match {
        case Array(key, value) => Some(key -> value)
        case _                 => None
      }
    }.toMap

    val big_file         = cmd_map.getOrElse("big_file", "")        // big data file with all handicap lines
    val matchid_big_file = cmd_map.getOrElse("match_big_file", "")  // odds data of the matches to compute
    val save_db_info     = cmd_map.getOrElse("save_db_info", "")
    val maxResultSize    = cmd_map.getOrElse("maxResultSize", "4g")
    val min_match_result_cnt = cmd_map.getOrElse("min_match_result_cnt", "5")
    val job_id      = cmd_map.getOrElse("job_id", "")
    val model_id    = cmd_map.getOrElse("spark_id", "")
    val output_file = cmd_map.getOrElse("output_file", "")

    // 1. Spark environment
    val sc = new SparkContext(new SparkConf()
      .setAppName("FBA2017007")
      .set("spark.driver.maxResultSize", maxResultSize)
    )
    val sqlContext = new SQLContext(sc) // SQLContext is already imported; no need to fully qualify

    // 2. Load the Asian-handicap lines file from HDFS, returning a DataFrame
    val src_asia_df = AllAsiaInputFile.load(sc, sqlContext, big_file)
    val asia_df = src_asia_df
      .orderBy(src_asia_df("match_id").asc, src_asia_df("company_id").asc, src_asia_df("match_time").asc)
      .select("match_id", "company_id", "match_time", "rqresult_diff", "init_odds", "curr_odds",
        "init_home_water", "init_away_water", "curr_home_water", "curr_away_water")

    // 3. Load the big data file (odds of the matches to be computed)
    val src_need_calculate_match_df = AllAsiaInputFile.load_match_file_rangqiu(sc, sqlContext, matchid_big_file)
    val need_calculate_match_df = src_need_calculate_match_df
      .orderBy(src_need_calculate_match_df("match_id").asc, src_need_calculate_match_df("company_id").asc,
        src_need_calculate_match_df("match_time").asc)
      .select("match_id", "company_id", "init_odds", "curr_odds", "init_home_water", "init_away_water",
        "curr_home_water", "curr_away_water", "rq_odds")
    println("need_calculate_match_df...")

    // 3.1 Compute the result set of initial->current handicap (line + water level) combinations
    val result_init_curr_df = AsiaWaterUtils.calculate_asia_init_curr_range_rq_data(asia_df, sqlContext)

    // 5.1 Match the specified fixtures against the Asian-handicap data (init->curr line + water level)
    calculate_match_data_FBA2017007(result_init_curr_df, need_calculate_match_df,
      min_match_result_cnt.toInt, output_file)

    // Update job and spark status: the Spark stage is done, waiting for sqoop to import into the DB.
    PredictionDBUtils.update_job_spark_status(save_db_info, job_id, model_id)

    sc.stop()
  }

  /**
   * Matches each target fixture against historical Asian-handicap movement patterns
   * and writes per-(match, company) win/draw/loss statistics to HDFS as CSV lines.
   *
   * @param result_df            historical pattern set; expected columns include
   *                             bcompany_id, binit_home/binit_odds/binit_away,
   *                             bcurr_home/bcurr_odds/bcurr_away, rqresult_diff,
   *                             match_ids, result_size
   * @param match_df             fixtures to evaluate (match_id, company_id,
   *                             init/curr line + water levels, rq_odds)
   * @param min_match_result_cnt a pattern is used only when its historical sample
   *                             size (result_size) is strictly greater than this
   * @param output_file          HDFS path for the CSV result (write is skipped
   *                             when there are no result rows)
   */
  def calculate_match_data_FBA2017007(result_df: DataFrame, match_df: DataFrame, min_match_result_cnt: Int, output_file: String): Unit = {

    // Cross-join then filter: keep the historical patterns whose company and whose
    // initial/current handicap line + water levels exactly equal the fixture's,
    // and that carry enough historical samples to be meaningful.
    val init_odds_df = result_df.join(match_df)
      .where(result_df("bcompany_id") === match_df("company_id"))
      .where(result_df("binit_home") === match_df("init_home_water"))
      .where(result_df("binit_odds") === match_df("init_odds"))
      .where(result_df("binit_away") === match_df("init_away_water"))
      .where(result_df("bcurr_home") === match_df("curr_home_water"))
      .where(result_df("bcurr_odds") === match_df("curr_odds"))
      .where(result_df("bcurr_away") === match_df("curr_away_water"))
      .where(result_df("result_size") > min_match_result_cnt)
      .orderBy(match_df("match_id").asc, match_df("company_id").asc)
      .select("match_id", "company_id", "init_home_water", "init_odds", "init_away_water",
        "curr_home_water", "curr_odds", "curr_away_water", "match_ids", "rq_odds", "rqresult_diff")

    // Row layout after the select above:
    //   [0]match_id [1]company_id [2]init_home [3]init_odds [4]init_away
    //   [5]curr_home [6]curr_odds [7]curr_away [8]match_ids [9]rq_odds [10]rqresult_diff
    // e.g. [451,-2,-50,2,1,-125,-2,...,568922_611237_...,7]
    val result_rdd = init_odds_df.rdd.map { row =>

      val match_id   = row.getString(0)
      val company_id = row.getString(1)

      val init_home = row.getString(2)
      val init_odds = row.getString(3)
      val init_away = row.getString(4)
      val init      = s"${init_home}_${init_away}"

      val curr_home = row.getString(5)
      val curr_odds = row.getString(6)
      val curr_away = row.getString(7)
      val curr      = s"${curr_home}_${curr_away}"

      // Keep only the 10 most recent historical match ids (source list is oldest-first).
      val match_ids     = row.getString(8).split("_").reverse.take(10).mkString("_")
      val rq_odds       = row.getString(9).toInt
      val rqresult_diff = row.getString(10)

      // Classify each historical goal difference against the Jingcai handicap line:
      // 3 = handicap win, 1 = handicap draw, 0 = handicap loss.
      val line = -rq_odds
      val results = rqresult_diff.split("_").map { diff =>
        val d = diff.toInt
        if (d > line) 3
        else if (d == line) 1
        else 0
      }
      val result_cnt = results.length // total number of historical outcomes (was a redundant .toInt)
      val win  = results.count(_ == 3)
      val draw = results.count(_ == 1)
      val loss = results.count(_ == 0)
      // Percentages; result_cnt is presumably > 0 thanks to the result_size filter
      // above (result_size appears to count the rqresult_diff entries) — TODO confirm.
      val win_ratio  = (win.toDouble  / result_cnt.toDouble * 100).round.toInt
      val draw_ratio = (draw.toDouble / result_cnt.toDouble * 100).round.toInt
      val loss_ratio = (loss.toDouble / result_cnt.toDouble * 100).round.toInt

      (match_id, company_id, init_odds, curr_odds, init, curr, result_cnt, win, draw, loss,
        win_ratio, draw_ratio, loss_ratio, results.reverse.take(10).mkString("_"), match_ids)
    }

    // Format the rows for HDFS (field layout shared with the Asian/European company result tables).
    val result_file_rdd = result_rdd.map {
      case (match_id, company_id, init_odds, curr_odds, init, curr, result_cnt, win, draw, loss,
            win_ratio, draw_ratio, loss_ratio, results, match_ids) =>

        // Derive the final prediction and the "caiqiu index" from the win/draw/loss counts.
        val result_str   = PredictionUtils.calcultion_spf_result(win, draw, loss)
        val items        = result_str.split("_")
        val final_result = items(0)
        val caiqiu_index = items(1)

        Array("FBA2017007", match_id, company_id, result_cnt, win, draw, loss,
          final_result, caiqiu_index, match_ids, 1).mkString(",")
    }

    // Skip the write when there is nothing to save (note: isEmpty() triggers a Spark job).
    if (!result_file_rdd.isEmpty()) {
      result_file_rdd.saveAsTextFile(output_file)
    }
  }

}


