//
//scp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar root@172.16.0.82:/data/jar/firstsparkapp_football_asia_odds_500w.jar
//
//
//nohup ./bin/spark-submit --master spark://skn-pmukvrk0-spark-master:7077 --class caiqr.model.fb_asia_odds.AsiaOddsMain --jars /usr/local/spark/lib/spark-redis-0.1.1.jar,/usr/local/spark/lib/mysql-connector-java-5.1.35.jar,/usr/local/spark/lib/jedis-2.7.0.jar --executor-memory 4G  /data/jar/firstsparkapp_football_asia_odds_500w.jar maxResultSize=4g miniMatchCount=2 save_db_info=172.16.0.19-prediction-caiqiu-Caiqiu502 job_id=2583 spark_id=11 redis_server=172.16.0.82 redis_port=6379 asia_type=all big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_asia_500w_dds_all_1.csv > /data/caiqiu/log/bk.log < /dev/null 2>&1  &

package caiqr.model.fb_asia_odds

import com.redislabs.provider.redis._
//import caiqr.db.five_million.football_match_sporttery_service
import caiqr.model.fb_asia_odds.AsiaInitOrCurrOdds
import caiqr.model.fb_asia_odds.AsiaInitOrCurrOddsRange
import caiqr.model.fb_asia_odds.AsiaInitOrCurrOddsSp
import caiqr.model.fb_asia_odds.AsiaInitOrCurrInputFile
import caiqr.model.fb_asia_odds.AsiaOddsSaveDB
import caiqr.utils.PredictionUtils
import caiqr.utils.PredictionDBUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}
import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException

object AsiaOddsMain {

  /**
   * Entry point for the Asia-odds similarity job.
   *
   * Expects at least 7 `key=value` arguments, e.g.:
   * {{{
   *   maxResultSize=4g miniMatchCount=2 save_db_info=... job_id=... spark_id=...
   *   redis_server=... redis_port=6379 asia_type=all big_file=hdfs://.../odds.csv
   * }}}
   *
   * Loads the Asia-odds CSV from HDFS, computes the requested similarity
   * dimension(s) ("odds", "range", "sp" or "all"), writes each result set to a
   * Redis hash via spark-redis, then records the job completion status in MySQL.
   *
   * @throws IllegalArgumentException when fewer than 7 arguments are supplied
   */
  def main(args: Array[String]) {

    // At least 7 key=value arguments are required (see the usage note above).
    if (args.length < 7) {
      // Bug fix: the original message was a plain string literal, so the
      // "${args.length}" placeholder was printed verbatim; use the s-interpolator.
      throw new IllegalArgumentException(s"Spark main args is error. ${args.length}")
    }

    //////////////////////////////// parse arguments ////////////////////////////////

    // Parse "key=value" tokens into a Map. split with limit 2 keeps values that
    // themselves contain '=' intact, and `collect` skips malformed tokens instead
    // of throwing ArrayIndexOutOfBoundsException as the original items(1) did.
    val cmd_map: Map[String, String] = args.iterator
      .map(_.split("=", 2))
      .collect { case Array(k, v) => (k, v) }
      .toMap

    // MySQL connection info and identifiers used to record job completion status.
    val save_db_info = cmd_map.getOrElse("save_db_info", "")
    val job_id = cmd_map.getOrElse("job_id", "")
    val spark_id = cmd_map.getOrElse("spark_id", "")

    // HDFS path of the input odds CSV.
    val big_file = cmd_map.getOrElse("big_file", "")

    // Driver-side result-set size limit, default 4g.
    val maxResultSize = cmd_map.getOrElse("maxResultSize", "4g")

    // Minimum number of matches sharing the same odds pattern;
    // groups smaller than this are ignored by the compute helpers.
    val miniMatchCount = cmd_map.getOrElse("miniMatchCount", "2").toInt

    // Redis endpoint the results are written to.
    val redis_server = cmd_map.getOrElse("redis_server", "localhost")
    val redis_port = cmd_map.getOrElse("redis_port", "6379").toInt

    // Which Asia-odds dimension(s) to compute: "all", "odds", "range" or "sp".
    val asia_type = cmd_map.getOrElse("asia_type", "all")

    //////////////////////////////// run the job ////////////////////////////////

    // 1. Spark context, configured with the Redis connection used by spark-redis.
    val sc = new SparkContext(new SparkConf()
      .setAppName("AsiaOddsMain")
      .set("spark.driver.maxResultSize", maxResultSize)
      .set("redis.host", redis_server)
      .set("redis.port", redis_port.toString)
      .set("redis.auth", "")
    )

    try {
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)

      // 2. Load the Asia-odds file from HDFS into a DataFrame.
      val asia_df = AsiaInitOrCurrInputFile.load(sc, sqlContext, big_file)

      // 3. Dispatch on the requested dimension.
      asia_type match {
        case "all" => // compute every dimension
          compute_asia_odds(sc, asia_df, miniMatchCount, redis_server, redis_port)
          compute_asia_range(sc, asia_df, miniMatchCount, redis_server, redis_port)
          compute_asia_sp(sc, asia_df, miniMatchCount, redis_server, redis_port)

        case "odds" => // same handicap only
          compute_asia_odds(sc, asia_df, miniMatchCount, redis_server, redis_port)

        case "range" => // same handicap + water-level range
          compute_asia_range(sc, asia_df, miniMatchCount, redis_server, redis_port)

        case "sp" => // same handicap + odds (sp)
          compute_asia_sp(sc, asia_df, miniMatchCount, redis_server, redis_port)

        case _ =>
          // Unknown asia_type: deliberately compute nothing (matches the
          // original behavior); the job status is still recorded below.
      }

      // 4. Record job completion status in MySQL.
      AsiaOddsSaveDB.save_job_status_to_mysql(save_db_info, job_id, spark_id)
    } finally {
      // Bug fix: the original never released the SparkContext; stop it even
      // when a computation fails so executors and ports are freed.
      sc.stop()
    }
  }


  /**
   * Computes the "same handicap" similarity dimensions and writes each result
   * RDD into its own Redis hash.
   *
   * Hashes written: odds_init_sid, odds_init_sname, odds_curr_sid,
   * odds_curr_sname, odds_init_curr_sid, odds_init_curr_sname.
   *
   * @param sc             active Spark context (carries the Redis config)
   * @param asia_df        Asia-odds DataFrame loaded from HDFS
   * @param miniMatchCount groups with fewer matches than this are ignored
   * @param redis_server   Redis host the hashes are written to
   * @param redis_port     Redis port
   */
  def compute_asia_odds(sc: SparkContext, asia_df: DataFrame, miniMatchCount: Int, redis_server: String, redis_port: Int ): Unit = {

    // Initial handicap identical: 1) by company + season id, 2) by season name.
    sc.toRedisHASH(AsiaInitOrCurrOdds.asia_init_odds_same_by_company_seasonid(asia_df, miniMatchCount), "odds_init_sid", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOdds.asia_init_odds_same_by_seasonname(asia_df, miniMatchCount), "odds_init_sname", (redis_server, redis_port))

    // Final handicap identical: 3) by company + season id, 4) by season name.
    sc.toRedisHASH(AsiaInitOrCurrOdds.asia_curr_odds_same_by_company_seasonid(asia_df, miniMatchCount), "odds_curr_sid", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOdds.asia_curr_odds_same_by_seasonname(asia_df, miniMatchCount), "odds_curr_sname", (redis_server, redis_port))

    // Both initial AND final handicap identical: 5) by company + season id, 6) by season name.
    sc.toRedisHASH(AsiaInitOrCurrOdds.asia_init_curr_odds_same_by_company_seasonid(asia_df, miniMatchCount), "odds_init_curr_sid", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOdds.asia_init_curr_odds_same_by_seasonname(asia_df, miniMatchCount), "odds_init_curr_sname", (redis_server, redis_port))
  }


  /**
   * Computes the "same handicap + water-level range" similarity dimensions and
   * writes each result RDD into its own Redis hash.
   *
   * Hashes written: range_init_sid, range_init_sname, range_curr_sid,
   * range_curr_sname, range_init_curr_sid, range_init_curr_sname.
   *
   * @param sc             active Spark context (carries the Redis config)
   * @param asia_df        Asia-odds DataFrame loaded from HDFS
   * @param miniMatchCount groups with fewer matches than this are ignored
   * @param redis_server   Redis host the hashes are written to
   * @param redis_port     Redis port
   */
  def compute_asia_range(sc: SparkContext, asia_df: DataFrame, miniMatchCount: Int, redis_server: String, redis_port: Int ): Unit = {

    // Initial handicap + water-level range identical: by company + season id / by season name.
    sc.toRedisHASH(AsiaInitOrCurrOddsRange.asia_init_range_same_by_company_seasonid(asia_df, miniMatchCount), "range_init_sid", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOddsRange.asia_init_range_same_by_seasonname(asia_df, miniMatchCount), "range_init_sname", (redis_server, redis_port))

    // Final handicap + water-level range identical: by company + season id / by season name.
    sc.toRedisHASH(AsiaInitOrCurrOddsRange.asia_curr_range_same_by_company_seasonid(asia_df, miniMatchCount), "range_curr_sid", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOddsRange.asia_curr_range_same_by_seasonname(asia_df, miniMatchCount), "range_curr_sname", (redis_server, redis_port))

    // Both initial AND final handicap + range identical: by company + season id / by season name.
    sc.toRedisHASH(AsiaInitOrCurrOddsRange.asia_init_curr_range_same_by_company_seasonid(asia_df, miniMatchCount), "range_init_curr_sid", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOddsRange.asia_init_curr_range_same_by_seasonname(asia_df, miniMatchCount), "range_init_curr_sname", (redis_server, redis_port))
  }


  /**
   * Computes the "same handicap + odds (sp)" similarity dimensions and writes
   * each result RDD into its own Redis hash.
   *
   * Hashes written: sp_init_sname, sp_init_all, sp_curr_sname, sp_curr_all,
   * sp_init_curr_all, sp_init_curr_odds_sname, sp_init_curr_odds_all.
   *
   * @param sc             active Spark context (carries the Redis config)
   * @param asia_df        Asia-odds DataFrame loaded from HDFS
   * @param miniMatchCount groups with fewer matches than this are ignored
   * @param redis_server   Redis host the hashes are written to
   * @param redis_port     Redis port
   */
  def compute_asia_sp(sc: SparkContext, asia_df: DataFrame, miniMatchCount: Int, redis_server: String, redis_port: Int ): Unit = {

    // Initial handicap + sp identical: 1) company + season, 2) company + all matches.
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_init_sp_same_by_company_seasonname(asia_df, miniMatchCount), "sp_init_sname", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_init_sp_all_same_by_company_seasonname(asia_df, miniMatchCount), "sp_init_all", (redis_server, redis_port))

    // Final handicap + sp identical: 3) company + season, 4) company + all matches.
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_curr_sp_same_by_company_seasonname(asia_df, miniMatchCount), "sp_curr_sname", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_curr_sp_all_same_by_company_seasonname(asia_df, miniMatchCount), "sp_curr_all", (redis_server, redis_port))

    // Initial AND final handicap + sp identical: 5) company + all matches.
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_init_curr_sp_same_by_company_seasonname(asia_df, miniMatchCount), "sp_init_curr_all", (redis_server, redis_port))

    // Initial handicap + sp identical AND final handicap identical:
    // 6) company + season, 7) company + all matches.
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_init_sp_curr_odds_same_by_company_seasonname(asia_df, miniMatchCount), "sp_init_curr_odds_sname", (redis_server, redis_port))
    sc.toRedisHASH(AsiaInitOrCurrOddsSp.asia_init_sp_curr_odds_all_same_by_company(asia_df, miniMatchCount), "sp_init_curr_odds_all", (redis_server, redis_port))
  }

}


