//spark: 2+8G, 1.4 min

//
//#0. 上传 spark 应用程序 jar
//scp FirstSparkApp/out/artifacts/FirstSparkAppJar/firstsparkapp.jar root@172.16.0.83:/data/caiqiu/console/jar/football_asia_season_500w.jar
//
//#1. 导出所有亚盘大数据文件
//  nohup mysql --quick -N -ucaiqiu -pCaiqiu502 -h172.16.0.6 500w < ~/football_all_asia_500w.sql > /data/caiqiu/csv/football_all_asia_500w_1.csv &
//
//
//#2. 上传亚盘赔率文件到 hadoop
//cd /usr/local/hadoop; bin/hadoop fs -rmr -skipTrash hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_all_asia_500w_1.csv
//cd /usr/local/hadoop; bin/hadoop fs -put file:///data/caiqiu/csv/football_all_asia_500w_1.csv  hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_all_asia_500w_1.csv
//
//
//#3.导出要计算比赛,所有盘口信息 大数据文件
//  nohup mysql --quick -N -ucaiqiu -pCaiqiu502 -h172.16.0.6 500w < ~/football_match_all_asia_500w.sql > /data/caiqiu/csv/football_season_asia_500w_1.csv &
//
//
//#4.计算比赛大数据文件上传 hadoop
//  cd /usr/local/hadoop; bin/hadoop fs -rmr -skipTrash hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_season_asia_500w_1.csv
//cd /usr/local/hadoop; bin/hadoop fs -put file:///data/caiqiu/csv/football_season_asia_500w_1.csv  hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_season_asia_500w_1.csv
//
//
//#5.spark计算
//nohup ./bin/spark-submit --master spark://skn-pmukvrk0-spark-master:7077 --class caiqr.model.fb_asia_season.AsiaSeasonMain --jars /usr/local/spark/lib/spark-redis-0.1.1.jar,/usr/local/spark/lib/mysql-connector-java-5.1.35.jar,/usr/local/spark/lib/jedis-2.7.0.jar --executor-memory 6G  --driver-memory 4G  /data/caiqiu/console/jar/football_asia_season_500w.jar maxResultSize=6g  match_big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_season_asia_500w_1.csv  big_file=hdfs://skn-wf2zlrwn-hadoop-master:9000/data/caiqiu/csv/football_all_asia_500w_1.csv save_db_info=172.16.0.19-prediction-caiqiu-Caiqiu502 redis_server=172.16.0.60 redis_port=6379 > /data/caiqiu/log/a.log < /dev/null 2>&1  &

//FBA2017003
//1. 赛季+公司+初盘盘口,匹配相同公司的相似比赛
//2. 例如: 亚盘_bet365_半球
//3. 结果输出: fb_asia_season_init_odds


//
//
//
//caiqr.model.fb_asia_season.AsiaSeasonMain
//
//以下3个维度全部计算: 5.4min
//init_curr
//init
//curr


package caiqr.model.fb_asia_water
import com.redislabs.provider.redis._
import caiqr.utils.AllAsiaInputFile
import caiqr.utils.PredictionUtils
import caiqr.utils.PredictionDBUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, DataFrame}
import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.lang.IllegalArgumentException

object FBA2017003 {

  /**
   * Entry point. Parses "key=value" command-line arguments, builds the
   * SparkContext (with spark-redis connection settings), loads the two
   * Asian-handicap CSV files from HDFS into DataFrames and runs the
   * season + company + initial-odds similarity calculation.
   *
   * Recognised arguments (all optional, with defaults):
   *   big_file             - HDFS path of the full historical odds file
   *   match_big_file       - HDFS path of the odds of the matches to predict
   *   save_db_info         - MySQL target as "ip-dbname-user-password"
   *   maxResultSize        - spark.driver.maxResultSize (default "4g")
   *   min_match_result_cnt - minimum similar-match count (default "10")
   *   redis_server         - Redis host (default "localhost")
   *   redis_port           - Redis port (default "6379")
   * (job_id / spark_id are accepted but currently unused and are ignored.)
   */
  def main(args: Array[String]): Unit = {

    //////////////////////////////// parse arguments ////////////////////////////////
    // Turn "key=value" args into a Map. split("=", 2) keeps '=' characters
    // inside the value intact, and a malformed arg without '=' is skipped
    // instead of crashing with ArrayIndexOutOfBoundsException.
    val cmd_map = args.toList
      .map(_.split("=", 2))
      .collect { case Array(k, v) => (k, v) }
      .toMap

    val big_file = cmd_map.getOrElse("big_file", "")               // all historical odds
    val matchid_big_file = cmd_map.getOrElse("match_big_file", "") // matches to predict
    val save_db_info = cmd_map.getOrElse("save_db_info", "")
    val maxResultSize = cmd_map.getOrElse("maxResultSize", "4g")
    val min_match_result_cnt = cmd_map.getOrElse("min_match_result_cnt", "10")
    val redis_server = cmd_map.getOrElse("redis_server", "localhost")
    // NOTE(review): throws NumberFormatException on a non-numeric port, as before.
    val redis_port = cmd_map.getOrElse("redis_port", "6379").toInt

    // 1. Spark environment.
    val sc = new SparkContext(new SparkConf()
      .setAppName("FBA2017003")
      .set("spark.driver.maxResultSize", maxResultSize)
      .set("redis.host", redis_server)
      .set("redis.port", redis_port.toString)
      .set("redis.auth", "")
    )
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    // 2. Load the Asian-handicap odds files from HDFS as DataFrames.
    val asia_df = AllAsiaInputFile.load(sc, sqlContext, big_file)
    val match_df = AllAsiaInputFile.load(sc, sqlContext, matchid_big_file)

    // 3.1 season + company + initial odds -> result sequence + initial Asian result
    calculate_asia_season_01(sc, asia_df, redis_server, redis_port, save_db_info, min_match_result_cnt, match_df, sqlContext)

    sc.stop()
  }

    //3.1 赛季+公司+初盘盘口,赛果,初亚盘赛果
    /**
     * 3.1 Group historical matches by season + company + initial-odds line,
     * build the per-group sequences of results / match ids / kickoff times
     * (ordered by kickoff time), join them against the matches to predict on
     * (season_pre, company_id, init_odds), and persist the statistics into
     * the MySQL table `fb_asia_season_init_odds`.
     *
     * @param sc                   active SparkContext (currently unused; kept for interface stability)
     * @param asia_df              full historical Asian-handicap odds DataFrame
     * @param redis_server         Redis host (currently unused; Redis output is disabled)
     * @param redis_port           Redis port (currently unused)
     * @param save_db_info         MySQL target as "ip-dbname-user-password"
     * @param min_match_result_cnt minimum similar-match count (currently unused here)
     * @param match_df             odds of the matches to predict
     * @param sqlContext           SQLContext used to build the intermediate DataFrame
     */
    def calculate_asia_season_01(sc: SparkContext, asia_df: DataFrame, redis_server: String, redis_port: Int, save_db_info: String, min_match_result_cnt: String, match_df: DataFrame, sqlContext: SQLContext): Unit = {

      // 3. Map each odds row to:
      //    ((season_id_season_pre_company_id_init_odds),
      //     ((match_result, match_time_ms), (match_id, match_time_ms), init_asia_result))
      // Column layout of asia_df (by index):
      //   match_id,company_id,match_time,season_id,                         #0-3
      //   match_desc,season_pre,group_pre,host_id,away_id,                  #4-8
      //   home_match_result,away_match_result,score,                        #9-11
      //   init_home,init_odds,init_away,curr_home,curr_odds,curr_away,      #12-17
      //   init_result,curr_result,                                          #18,19
      //   init_home_water,init_away_water,curr_home_water,curr_away_water,  #20-23
      //   init_ret,curr_ret                                                 #24,25
      val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
      val tuple_same_init_odds_rdd = asia_df.rdd.map { p =>
        val match_time = p.getString(2)
        val match_time_second = sdf.parse(match_time).getTime
        (s"${p.getString(3)}_${p.getString(5)}_${p.getString(1)}_${p.getString(13)}",
          ((p.getString(9), match_time_second.toString()), (p.getString(0), match_time_second.toString()), p.getString(18)))
      }

      // 4. groupByKey, then sort each group's values by kickoff time so the
      //    concatenated sequences below come out in chronological order.
      // OUT: (301925_487_1010_7500,((3,t0),(3,t1),(3,t2),...))
      val new_tuple_same_init_odds_index_order_rdd = tuple_same_init_odds_rdd.groupByKey().map { p =>
        val sortArray = p._2.toArray.sortWith(_._1._2 < _._1._2)
        (p._1, sortArray)
      }

      // 5. Reduce each sorted group into concatenated sequences:
      //    (key, (results, match_ids joined by "_", match_times joined by "_", asia_results))
      val new_tuple_same_init_odds_rdd = new_tuple_same_init_odds_index_order_rdd.map(p =>
        (p._1, (p._2.map(p => p._1._1).reduce(_ + _), p._2.map(p => p._2._1).reduce(_ + "_" + _), p._2.map(p => p._2._2).reduce(_ + "_" + _), p._2.map(p => p._3).reduce(_ + _)))
      )

      // 6. Flatten key + aggregates into one CSV line per group:
      //    season_id,season_pre,company_id,init_odds,results,asia_results,result_size,match_ids
      val same_init_odds_map_rdd = new_tuple_same_init_odds_rdd.map { p =>
        val keys = p._1.split("_")
        val season_id = keys(0)
        val season_pre = keys(1)
        val company_id = keys(2)
        val init_odds = keys(3)
        // Each result is a single character, so the string length IS the count.
        val result_size = p._2._1.length
        s"${season_id},${season_pre},${company_id},${init_odds},${p._2._1},${p._2._4},${result_size},${p._2._2}"
      }

      // 7. Turn the CSV lines into a DataFrame (all columns as strings).
      val schemaString = "season_id,season_pre,company_id,init_odds,result,asia_result,result_size,match_ids"
      import org.apache.spark.sql.Row
      import org.apache.spark.sql.types.{StringType, StructField, StructType}
      val schema =
        StructType(
          schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))
      val rowRDD = same_init_odds_map_rdd.map(_.split(",")).map(p => Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7)))
      val all_asia_range_df = sqlContext.createDataFrame(rowRDD, schema)

      // 8. Join the aggregates against the matches to predict on
      //    (season_pre, company_id, init_odds), ordered by match then season.
      val init_odds_df = all_asia_range_df.join(match_df).
        where(all_asia_range_df("season_pre") === match_df("season_pre")).
        where(all_asia_range_df("company_id") === match_df("company_id")).
        where(all_asia_range_df("init_odds") === match_df("init_odds")).
        orderBy(match_df("match_id").asc, match_df("season_id").asc).
        selectExpr("season_id as src_season_id","season_pre as src_season_pre","company_id as src_company_id",
          "init_odds as src_init_odds","result as src_result","asia_result as src_asia_result",
          "result_size as src_result_size","match_ids as src_match_ids"
        )

      // 9. Persist to MySQL. save_db_info is "ip-dbname-user-password".
      val split_items_arr = save_db_info.split("-")
      val db_ip = split_items_arr(0)
      val db_name = split_items_arr(1)
      val db_user = split_items_arr(2)
      val db_passwd = split_items_arr(3)
      val url = s"jdbc:mysql://${db_ip}:3306/${db_name}"
      Class.forName("com.mysql.jdbc.Driver").newInstance
      val conn = DriverManager.getConnection(url, db_user, db_passwd)
      try {
        // Prepare the statement ONCE and reuse it per row; the original code
        // created (and never closed) a new PreparedStatement on every row.
        val updateResultData = conn.prepareStatement("REPLACE INTO fb_asia_season_init_odds (company_id,season_pre,init_odds,season_id,result_cnt,results,win,draw,loss,win_ratio,draw_ratio,loss_ratio,asia_results,asia_win,asia_draw,asia_loss,asia_win_ratio,asia_draw_ratio,asia_loss_ratio,match_ids) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);")
        try {
          init_odds_df.collect().foreach { p =>
            // Row layout follows the selectExpr above:
            //   0 season_id, 1 season_pre, 2 company_id, 3 init_odds,
            //   4 results, 5 asia_result, 6 result_size (unused), 7 match_ids
            val season_id = p.getString(0)
            val season_pre = p.getString(1)
            val company_id = p.getString(2)
            val init_odds = p.getString(3)
            val results = p.getString(4)
            val asia_result = p.getString(5)
            val match_ids = p.getString(7)

            // '3' = win, '1' = draw, '0' = loss; one character per match.
            val result_cnt = results.length
            val win = results.count(_ == '3')
            val draw = results.count(_ == '1')
            val loss = results.count(_ == '0')
            val win_ratio = (win.toDouble / result_cnt.toDouble * 100).round.toInt
            val draw_ratio = (draw.toDouble / result_cnt.toDouble * 100).round.toInt
            val loss_ratio = (loss.toDouble / result_cnt.toDouble * 100).round.toInt

            // Asian-handicap ratios use the same denominator (result_cnt).
            val asia_win = asia_result.count(_ == '3')
            val asia_draw = asia_result.count(_ == '1')
            val asia_loss = asia_result.count(_ == '0')
            val asia_win_ratio = (asia_win.toDouble / result_cnt.toDouble * 100).round.toInt
            val asia_draw_ratio = (asia_draw.toDouble / result_cnt.toDouble * 100).round.toInt
            val asia_loss_ratio = (asia_loss.toDouble / result_cnt.toDouble * 100).round.toInt

            updateResultData.setInt(1, company_id.toInt)
            updateResultData.setString(2, season_pre)
            updateResultData.setInt(3, init_odds.toInt)
            updateResultData.setInt(4, season_id.toInt)
            updateResultData.setInt(5, result_cnt)

            updateResultData.setString(6, results)
            updateResultData.setInt(7, win)
            updateResultData.setInt(8, draw)
            updateResultData.setInt(9, loss)
            updateResultData.setInt(10, win_ratio)
            updateResultData.setInt(11, draw_ratio)
            updateResultData.setInt(12, loss_ratio)

            updateResultData.setString(13, asia_result)
            updateResultData.setInt(14, asia_win)
            updateResultData.setInt(15, asia_draw)
            updateResultData.setInt(16, asia_loss)
            updateResultData.setInt(17, asia_win_ratio)
            updateResultData.setInt(18, asia_draw_ratio)
            updateResultData.setInt(19, asia_loss_ratio)
            updateResultData.setString(20, match_ids)

            updateResultData.executeUpdate
          }
        } finally {
          updateResultData.close()
        }
      } finally {
        // Release the connection even if a row fails; the original leaked it
        // on any SQLException.
        conn.close()
      }
    }














//
//  def calculate_asia_init_curr_range_data(asia_df: DataFrame, sqlContext: SQLContext): DataFrame ={
//    //3. 初盘-终盘范围统计
//    // 转换为元祖
//    // OUT:
//    // (公司ID_初盘范围_终盘范围, ((赛果,赛事时间),(赛事ID,赛事时间), 比分))
//    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
//    val tuple_same_init_odds_rdd = asia_df.map{p =>
//      //match_id,company_id,match_time,league,match_result,score,
//      //init_home,init_odds,init_away,
//      //curr_home,curr_odds,curr_away,
//      //init_home_water,init_away_water,curr_home_water,curr_away_water
//      val match_time = p.getString(2)
//      val match_time_second = sdf.parse(match_time).getTime
//      (s"${p.getString(1)}_${p.getString(12)}_${p.getString(7)}_${p.getString(13)}_${p.getString(14)}_${p.getString(10)}_${p.getString(15)}",
//        ((p.getString(4),match_time_second.toString()), (p.getString(0),match_time_second.toString()), p.getString(5)) )
//    }
//    //tuple_same_init_odds_rdd.collect().foreach(println)
//
//
//    //4. 分组(groupByKey), 并将分组后keys值, 按照序号排序(保证结果集按照顺序)
//    //OUT: (301925_487_1010_7500_16000,((3,0),(3,1),(3,2),...))
//    val new_tuple_same_init_odds_index_order_rdd = tuple_same_init_odds_rdd.groupByKey().map{ p =>
//      val sortArray = p._2.toArray.sortWith(_._1._2 < _._1._2)
//      (p._1, sortArray)
//    }
//    //            new_tuple_same_init_odds_index_order_rdd.collect().foreach{p =>
//    //              println(p._1)
//    //              p._2.foreach(println)
//    //            }
//
//
//    //5. reduce结果集
//    //OUT:
//    //(472_1700_3100_4200_1200_4300_13000,(310,321925_331925_305925,2011-02-20 01:15:00_2011-01-13 04:45:00_2010-11-11 05:00:00, 33_10_21))
//    val new_tuple_same_init_odds_rdd = new_tuple_same_init_odds_index_order_rdd.map(p =>
//      (p._1, (p._2.map(p => p._1._1).reduce(_+_), p._2.map(p => p._2._1).reduce(_+"_"+_), p._2.map(p => p._2._2).reduce(_+"_"+_), p._2.map(p => p._3).reduce(_+"_"+_)))
//    )
//    //new_tuple_same_init_odds_rdd.collect().foreach(println)
//
//
//    //6. 汇总最终结果,保存 hdfs
//    //OUT:
//    //(472,1700,3100,4200,1200,4300,13000,33,215024_214933,1255543200000_1255545900000,33_10_12)
//    //(公司ID,初盘范围,终盘范围, 赛果序列,赛事ID序列,开赛时间序列,比分序列)
//    val same_init_odds_map_rdd = new_tuple_same_init_odds_rdd.map { p =>
//      val keys = p._1.split("_")
//      val company_id = keys(0)
//      val init_home = keys(1)
//      val init_odds = keys(2)
//      val init_away = keys(3)
//      val curr_home = keys(4)
//      val curr_odds = keys(5)
//      val curr_away = keys(6)
//      val result_size = p._2._1.toString.length.toInt
//
//      s"${company_id},${init_home},${init_odds},${init_away},${curr_home},${curr_odds},${curr_away},${p._2._1},${p._2._2},${p._2._3},${p._2._4},${result_size}"
//    }
//
//
//    // 7. 将结果集保存为DF, 返回
//    // 不再保存 hdfs, 用时: 2.6 min
//    val schemaString = "company_id,init_home,init_odds,init_away,curr_home,curr_odds,curr_away,match_ids,results,match_times,scores,result_size"
//    import org.apache.spark.sql.Row
//    import org.apache.spark.sql.types.{StringType, StructField, StructType}
//    val schema =
//      StructType(
//        schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))
//    val rowRDD = same_init_odds_map_rdd.map(_.split(",")).map(p => Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11)))
//    val all_asia_range_df = sqlContext.createDataFrame(rowRDD, schema)
//    return all_asia_range_df
//
//
//
//    //7. 结果集保存到 hdfs 上.
//    //same_init_odds_map_rdd.collect().foreach(println)
//    //same_init_odds_map_rdd.saveAsTextFile(out_file)
//
////    //读取结果集
////    val result_df = asiaDiffInputFile.load_result(sc, sqlContext, out_file)
////    result_df.collect().foreach(println)
////
////    val same_init_odds_df = result_df.filter(result_df("company_id") === "65")
////    same_init_odds_df.collect().foreach(println)
//
//  }
//
//
//  //计算亚盘初盘 盘口+水位结果集
//  def calculate_asia_init_range_data(asia_df: DataFrame, sqlContext: SQLContext): DataFrame ={
//    //3. 初盘-终盘范围统计
//    // 转换为元祖
//    // OUT:
//    // (公司ID_初盘范围_终盘范围, ((赛果,赛事时间),(赛事ID,赛事时间), 比分))
//    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
//    val tuple_same_init_odds_rdd = asia_df.map{p =>
//      //match_id,company_id,match_time,league,match_result,score,
//      //init_home,init_odds,init_away,
//      //curr_home,curr_odds,curr_away,
//      //init_home_water,init_away_water,curr_home_water,curr_away_water
//      val match_time = p.getString(2)
//      val match_time_second = sdf.parse(match_time).getTime
//      (s"${p.getString(1)}_${p.getString(12)}_${p.getString(7)}_${p.getString(13)}",
//        ((p.getString(4),match_time_second.toString()), (p.getString(0),match_time_second.toString()), p.getString(5)) )
//    }
//    //tuple_same_init_odds_rdd.collect().foreach(println)
//
//
//    //4. 分组(groupByKey), 并将分组后keys值, 按照序号排序(保证结果集按照顺序)
//    //OUT: (301925_487_1010_7500_16000,((3,0),(3,1),(3,2),...))
//    val new_tuple_same_init_odds_index_order_rdd = tuple_same_init_odds_rdd.groupByKey().map{ p =>
//      val sortArray = p._2.toArray.sortWith(_._1._2 < _._1._2)
//      (p._1, sortArray)
//    }
//    //            new_tuple_same_init_odds_index_order_rdd.collect().foreach{p =>
//    //              println(p._1)
//    //              p._2.foreach(println)
//    //            }
//
//
//    //5. reduce结果集
//    //OUT:
//    //(472_1700_3100_4200_1200_4300_13000,(310,321925_331925_305925,2011-02-20 01:15:00_2011-01-13 04:45:00_2010-11-11 05:00:00, 33_10_21))
//    val new_tuple_same_init_odds_rdd = new_tuple_same_init_odds_index_order_rdd.map(p =>
//      (p._1, (p._2.map(p => p._1._1).reduce(_+_), p._2.map(p => p._2._1).reduce(_+"_"+_), p._2.map(p => p._2._2).reduce(_+"_"+_), p._2.map(p => p._3).reduce(_+"_"+_)))
//    )
//    //new_tuple_same_init_odds_rdd.collect().foreach(println)
//
//
//    //6. 汇总最终结果,保存 hdfs
//    //OUT:
//    //(472,1700,3100,4200,1200,4300,13000,33,215024_214933,1255543200000_1255545900000,33_10_12)
//    //(公司ID,初盘范围,终盘范围, 赛果序列,赛事ID序列,开赛时间序列,比分序列)
//    val same_init_odds_map_rdd = new_tuple_same_init_odds_rdd.map { p =>
//      val keys = p._1.split("_")
//      val company_id = keys(0)
//      val init_home = keys(1)
//      val init_odds = keys(2)
//      val init_away = keys(3)
//      val result_size = p._2._1.toString.length.toInt
//
//      s"${company_id},${init_home},${init_odds},${init_away},${p._2._1},${p._2._2},${p._2._3},${p._2._4},${result_size}"
//    }
//
//
//    // 7. 将结果集保存为DF, 返回
//    // 不再保存 hdfs, 用时: 2.6 min
//    val schemaString = "company_id,init_home,init_odds,init_away,match_ids,results,match_times,scores,result_size"
//    import org.apache.spark.sql.Row
//    import org.apache.spark.sql.types.{StringType, StructField, StructType}
//    val schema =
//      StructType(
//        schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))
//    val rowRDD = same_init_odds_map_rdd.map(_.split(",")).map(p => Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8)))
//    val all_asia_range_df = sqlContext.createDataFrame(rowRDD, schema)
//    return all_asia_range_df
//  }
//
//
//
//  //计算亚盘终盘 盘口+水位结果集
//  def calculate_asia_curr_range_data(asia_df: DataFrame, sqlContext: SQLContext): DataFrame ={
//    //3. 初盘-终盘范围统计
//    // 转换为元祖
//    // OUT:
//    // (公司ID_初盘范围_终盘范围, ((赛果,赛事时间),(赛事ID,赛事时间), 比分))
//    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
//    val tuple_same_init_odds_rdd = asia_df.map{p =>
//      //match_id,company_id,match_time,league,match_result,score, (0-5)
//      //init_home,init_odds,init_away,(6-8)
//      //curr_home,curr_odds,curr_away, (9-11)
//      //init_home_water,init_away_water,curr_home_water,curr_away_water (12-15)
//      val match_time = p.getString(2)
//      val match_time_second = sdf.parse(match_time).getTime
//      (s"${p.getString(1)}_${p.getString(14)}_${p.getString(10)}_${p.getString(15)}",
//        ((p.getString(4),match_time_second.toString()), (p.getString(0),match_time_second.toString()), p.getString(5)) )
//    }
//    //tuple_same_init_odds_rdd.collect().foreach(println)
//
//
//    //4. 分组(groupByKey), 并将分组后keys值, 按照序号排序(保证结果集按照顺序)
//    //OUT: (301925_487_1010_7500_16000,((3,0),(3,1),(3,2),...))
//    val new_tuple_same_init_odds_index_order_rdd = tuple_same_init_odds_rdd.groupByKey().map{ p =>
//      val sortArray = p._2.toArray.sortWith(_._1._2 < _._1._2)
//      (p._1, sortArray)
//    }
//    //            new_tuple_same_init_odds_index_order_rdd.collect().foreach{p =>
//    //              println(p._1)
//    //              p._2.foreach(println)
//    //            }
//
//
//    //5. reduce结果集
//    //OUT:
//    //(472_1700_3100_4200_1200_4300_13000,(310,321925_331925_305925,2011-02-20 01:15:00_2011-01-13 04:45:00_2010-11-11 05:00:00, 33_10_21))
//    val new_tuple_same_init_odds_rdd = new_tuple_same_init_odds_index_order_rdd.map(p =>
//      (p._1, (p._2.map(p => p._1._1).reduce(_+_), p._2.map(p => p._2._1).reduce(_+"_"+_), p._2.map(p => p._2._2).reduce(_+"_"+_), p._2.map(p => p._3).reduce(_+"_"+_)))
//    )
//    //new_tuple_same_init_odds_rdd.collect().foreach(println)
//
//
//    //6. 汇总最终结果,保存 hdfs
//    //OUT:
//    //(472,1700,3100,4200,1200,4300,13000,33,215024_214933,1255543200000_1255545900000,33_10_12)
//    //(公司ID,初盘范围,终盘范围, 赛果序列,赛事ID序列,开赛时间序列,比分序列)
//    val same_init_odds_map_rdd = new_tuple_same_init_odds_rdd.map { p =>
//      val keys = p._1.split("_")
//      val company_id = keys(0)
//      val curr_home = keys(1)
//      val curr_odds = keys(2)
//      val curr_away = keys(3)
//      val result_size = p._2._1.toString.length.toInt
//
//      s"${company_id},${curr_home},${curr_odds},${curr_away},${p._2._1},${p._2._2},${p._2._3},${p._2._4},${result_size}"
//    }
//
//
//    // 7. 将结果集保存为DF, 返回
//    // 不再保存 hdfs, 用时: 2.6 min
//    val schemaString = "company_id,curr_home,curr_odds,curr_away,match_ids,results,match_times,scores,result_size"
//    import org.apache.spark.sql.Row
//    import org.apache.spark.sql.types.{StringType, StructField, StructType}
//    val schema =
//      StructType(
//        schemaString.split(",").map(fieldName => StructField(fieldName, StringType, nullable = true)))
//    val rowRDD = same_init_odds_map_rdd.map(_.split(",")).map(p => Row(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8)))
//    val all_asia_range_df = sqlContext.createDataFrame(rowRDD, schema)
//    return all_asia_range_df
//  }
//
//
//
//
//
//  //计算指定比赛的亚盘数据匹配
//  def calculate_match_asia_range_data(result_df: DataFrame, match_df: DataFrame, save_db_info: String, min_match_result_cnt: Int): Unit ={
//
//    val init_odds_df = result_df.join(match_df).
//      where(result_df("company_id") === match_df("src_company_id")).
//      where(result_df("init_home") === match_df("src_init_home")).
//      where(result_df("init_odds") === match_df("src_init_odds")).
//      where(result_df("init_away") === match_df("src_init_away")).
//      where(result_df("curr_home") === match_df("src_curr_home")).
//      where(result_df("curr_odds") === match_df("src_curr_odds")).
//      where(result_df("curr_away") === match_df("src_curr_away")).
//      where(result_df("result_size") > min_match_result_cnt).
//      orderBy(match_df("match_id").asc, match_df("src_company_id").asc).
//      selectExpr("match_id as src_match_id", "src_company_id as mcompany_id",
//        "init_home as src_init_home", "init_odds as src_init_odds", "init_away as src_init_away",
//        "curr_home as src_curr_home", "curr_odds as src_curr_odds", "curr_away as src_curr_away",
//        "match_ids as mmatch_ids","results as mresults","match_times as mmatch_times","scores as mscores"
//      )
//
//    //结果集保存DB
//    val split_items_arr = save_db_info.split("-")
//    val db_ip = split_items_arr(0)
//    val db_name = split_items_arr(1)
//    val db_user = split_items_arr(2)
//    val db_passwd = split_items_arr(3)
//    val url = s"jdbc:mysql://${db_ip}:3306/${db_name}"
//    val username = db_user
//    val password = db_passwd
//    Class.forName("com.mysql.jdbc.Driver").newInstance
//    val conn = DriverManager.getConnection(url,username,password)
//
//
//    init_odds_df.collect().map{ p =>
//      val updateResultData = conn.prepareStatement("REPLACE INTO match_fb_asia_water (match_id,company_id,init_home,init_odds,init_away,curr_home,curr_odds,curr_away,result_cnt,win,draw,loss,win_ratio,draw_ratio,loss_ratio,results,match_ids,scores) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);")
//
//
//      val match_id = p.getString(0)
//      val company_id = p.getString(1)
//
//      val init_home = p.getString(2)
//      val init_odds = p.getString(3)
//      val init_away = p.getString(4)
//
//      val curr_home = p.getString(5)
//      val curr_odds = p.getString(6)
//      val curr_away = p.getString(7)
//
//      val results = p.getString(8)
//      val match_ids = p.getString(9)
//      val scores = p.getString(11)
//
//      val result_cnt = results.length.toInt
//      val win = results.count(_ == '3')
//      val draw = results.count(_ == '1')
//      val loss = results.count(_ == '0')
//
//      val win_ratio = (win.toDouble / result_cnt.toDouble * 100).round.toInt
//      val draw_ratio = (draw.toDouble / result_cnt.toDouble * 100).round.toInt
//      val loss_ratio = (loss.toDouble / result_cnt.toDouble * 100).round.toInt
//
//      // 保存计算结果
//
//      updateResultData.setString(1, match_id)
//      updateResultData.setInt(2, company_id.toInt)
//
//      updateResultData.setInt(3, init_home.toInt)
//      updateResultData.setInt(4, init_odds.toInt)
//      updateResultData.setInt(5, init_away.toInt)
//
//      updateResultData.setInt(6, curr_home.toInt)
//      updateResultData.setInt(7, curr_odds.toInt)
//      updateResultData.setInt(8, curr_away.toInt)
//
//      updateResultData.setInt(9, result_cnt.toInt)
//
//      updateResultData.setInt(10, win.toInt)
//      updateResultData.setInt(11, draw.toInt)
//      updateResultData.setInt(12, loss.toInt)
//
//      updateResultData.setInt(13, win_ratio.toInt)
//      updateResultData.setInt(14, draw_ratio.toInt)
//      updateResultData.setInt(15, loss_ratio.toInt)
//
//      updateResultData.setString(16, results)
//      updateResultData.setString(17, match_ids)
//      updateResultData.setString(18, scores)
//
//      updateResultData.executeUpdate
//    }
//
//
//
//    conn.close()
//
//
//
//
//  }
//
//
//
//
//
//  //计算指定比赛的亚盘数据匹配(初盘: 盘口+水位)
//  def calculate_match_asia_init_range_data(result_df: DataFrame, match_df: DataFrame, save_db_info: String, min_match_result_cnt: Int): Unit ={
//
//    val init_odds_df = result_df.join(match_df).
//      where(result_df("company_id") === match_df("src_company_id")).
//      where(result_df("init_home") === match_df("src_init_home")).
//      where(result_df("init_odds") === match_df("src_init_odds")).
//      where(result_df("init_away") === match_df("src_init_away")).
//      where(result_df("result_size") > min_match_result_cnt).
//      orderBy(match_df("match_id").asc, match_df("src_company_id").asc).
//      selectExpr("match_id as src_match_id", "src_company_id as mcompany_id",
//        "init_home as src_init_home", "init_odds as src_init_odds", "init_away as src_init_away",
//        "match_ids as mmatch_ids","results as mresults","match_times as mmatch_times","scores as mscores"
//      )
//
//    save_fb_asia_init_curr_water(init_odds_df, save_db_info, "init")
//  }
//
//
//
//
//  //计算指定比赛的亚盘数据匹配(终盘: 盘口+水位)
//  def calculate_match_asia_curr_range_data(result_df: DataFrame, match_df: DataFrame, save_db_info: String, min_match_result_cnt: Int): Unit ={
//
//    val curr_odds_df = result_df.join(match_df).
//      where(result_df("company_id") === match_df("src_company_id")).
//      where(result_df("curr_home") === match_df("src_curr_home")).
//      where(result_df("curr_odds") === match_df("src_curr_odds")).
//      where(result_df("curr_away") === match_df("src_curr_away")).
//      where(result_df("result_size") > min_match_result_cnt).
//      orderBy(match_df("match_id").asc, match_df("src_company_id").asc).
//      selectExpr("match_id as src_match_id", "src_company_id as mcompany_id",
//        "curr_home as src_curr_home", "curr_odds as src_curr_odds", "curr_away as src_curr_away",
//        "match_ids as mmatch_ids","results as mresults","match_times as mmatch_times","scores as mscores"
//      )
//
//    save_fb_asia_init_curr_water(curr_odds_df, save_db_info, "curr")
//  }
//
//
//
//
//
//
//  def save_fb_asia_init_curr_water(init_odds_df: DataFrame, save_db_info: String, odds_type: String): Unit = {
//    //结果集保存DB
//    val split_items_arr = save_db_info.split("-")
//    val db_ip = split_items_arr(0)
//    val db_name = split_items_arr(1)
//    val db_user = split_items_arr(2)
//    val db_passwd = split_items_arr(3)
//    val url = s"jdbc:mysql://${db_ip}:3306/${db_name}"
//    val username = db_user
//    val password = db_passwd
//    Class.forName("com.mysql.jdbc.Driver").newInstance
//    val conn = DriverManager.getConnection(url,username,password)
//
//
//    init_odds_df.collect().map{ p =>
//      val updateResultData = conn.prepareStatement("REPLACE INTO fb_asia_water (company_id,type,home,odds,away,result_cnt,win,draw,loss,win_ratio,draw_ratio,loss_ratio,results,match_ids,scores) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);")
//
//      val company_id = p.getString(1)
//      val home = p.getString(2)
//      val odds = p.getString(3)
//      val away = p.getString(4)
//      val results = p.getString(5)
//      val match_ids = p.getString(6)
//      val scores = p.getString(8)
//
//      val result_cnt = results.length.toInt
//      val win = results.count(_ == '3')
//      val draw = results.count(_ == '1')
//      val loss = results.count(_ == '0')
//
//      val win_ratio = (win.toDouble / result_cnt.toDouble * 100).round.toInt
//      val draw_ratio = (draw.toDouble / result_cnt.toDouble * 100).round.toInt
//      val loss_ratio = (loss.toDouble / result_cnt.toDouble * 100).round.toInt
//
//      // 保存计算结果
//      updateResultData.setInt(1, company_id.toInt)
//      updateResultData.setString(2, odds_type)
//
//      updateResultData.setInt(3, home.toInt)
//      updateResultData.setInt(4, odds.toInt)
//      updateResultData.setInt(5, away.toInt)
//      updateResultData.setInt(6, result_cnt.toInt)
//
//      updateResultData.setInt(7, win.toInt)
//      updateResultData.setInt(8, draw.toInt)
//      updateResultData.setInt(9, loss.toInt)
//
//      updateResultData.setInt(10, win_ratio.toInt)
//      updateResultData.setInt(11, draw_ratio.toInt)
//      updateResultData.setInt(12, loss_ratio.toInt)
//
//      updateResultData.setString(13, results)
//      updateResultData.setString(14, match_ids)
//      updateResultData.setString(15, scores)
//
//      updateResultData.executeUpdate
//    }
//
//    conn.close()
//  }


}



//
//CREATE TABLE `match_fb_asia_water` (
//`match_id` varchar(10) NOT NULL,
//`company_id` int(11) NOT NULL DEFAULT '0',
//`init_home` int(11) DEFAULT '0',
//`init_odds` int(11) DEFAULT '0',
//`init_away` int(11) DEFAULT '0',
//`curr_home` int(11) DEFAULT '0',
//`curr_odds` int(11) DEFAULT '0',
//`curr_away` int(11) DEFAULT '0',
//`result_cnt` int(11) DEFAULT '0',
//`win` int(11) DEFAULT '0',
//`draw` int(11) DEFAULT '0',
//`loss` int(11) DEFAULT '0',
//`win_ratio` int(11) DEFAULT '0',
//`draw_ratio` int(11) DEFAULT '0',
//`loss_ratio` int(11) DEFAULT '0',
//`update_time` datetime DEFAULT NULL,
//`results` longtext,
//`match_ids` longtext,
//`scores` longtext,
//PRIMARY KEY (`match_id`,`company_id`)
//) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='赛事亚盘初盘-终盘水位表';




//
//CREATE TABLE `fb_asia_water` (
//`company_id` int(11) NOT NULL DEFAULT '0',
//`type` char(10) NOT NULL DEFAULT '',
//`home` int(11) NOT NULL DEFAULT '0',
//`odds` int(11) NOT NULL DEFAULT '0',
//`away` int(11) NOT NULL DEFAULT '0',
//`result_cnt` int(11) DEFAULT '0',
//`win` int(11) DEFAULT '0',
//`draw` int(11) DEFAULT '0',
//`loss` int(11) DEFAULT '0',
//`win_ratio` int(11) DEFAULT '0',
//`draw_ratio` int(11) DEFAULT '0',
//`loss_ratio` int(11) DEFAULT '0',
//`update_time` datetime DEFAULT NULL,
//`results` longtext,
//`match_ids` longtext,
//`scores` longtext,
//PRIMARY KEY (`company_id`,`type`,`home`,`odds`,`away`)
//) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='赛事亚盘初盘/终盘水位表';

//
