package cn.edu360.day7

import java.util

import org.apache.spark.sql.{DataFrame, Row, SparkSession}


/**
  * Created by sw on 2018/3/30
  */
/**
  * Created by sw on 2018/3/30
  *
  * Exports the adjusted-NAV history (PRICE_DATE, F_NAV_ADJUSTED) of current,
  * contract-type open-end funds in sector 20010102* from a Wind MySQL replica,
  * appending one CSV part per fund code under D:\abc.csv.
  */
object WindToCSV {

  // NOTE(review): database credentials are hard-coded in source; move them to
  // configuration / environment variables before sharing or deploying this.
  private val JdbcBaseOptions = Map(
    "url" -> "jdbc:mysql://rr-2ze6ig14in7r2ek643o.mysql.rds.aliyuncs.com:3306/wind",
    "driver" -> "com.mysql.jdbc.Driver",
    "user" -> "dbuser",
    "password" -> "m7qx^d6VQ5jkzP3x6c")

  /**
    * Loads one JDBC table and registers it as a temp view of the same name.
    * createOrReplaceTempView is used so a re-run inside the same session does
    * not fail with "view already exists" (createTempView would throw).
    */
  private def registerJdbcView(spark: SparkSession, table: String): DataFrame = {
    val df = spark.read.format("jdbc")
      .options(JdbcBaseOptions + ("dbtable" -> table))
      .load()
    df.createOrReplaceTempView(table)
    df
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("WindToCSV")
      .master("local[*]")
      .getOrCreate()

    // Earliest trading date (yyyyMMdd, inclusive) of the export window.
    val firstDay = 20100329

    registerJdbcView(spark, "ChinaMutualFundDescription")
    registerJdbcView(spark, "ChinaMutualFundNAV")
    registerJdbcView(spark, "ChinaMutualFundSector")

    // Select current (cur_sign = 1), contract-type open-end funds whose
    // sector code starts with 20010102.
    val windCodes: DataFrame = spark.sql(
      """select c.f_info_windcode
        |from (select a.f_info_windcode, a.s_info_sector, a.cur_sign, b.F_INFO_TYPE
        |      from ChinaMutualFundSector a
        |      left join ChinaMutualFundDescription b
        |        on a.F_INFO_WINDCODE = b.F_INFO_WINDCODE) c
        |where c.cur_sign = 1
        |  and c.F_INFO_TYPE = "契约型开放式"
        |  and c.s_info_sector like "20010102%"""".stripMargin)

    // Upper bound of the window; Tool is project-local (same package).
    val yesterday = Tool.getYesterday()

    // Driver-side loop: one NAV query + one CSV append per fund code.
    windCodes.collect().foreach { row =>
      // Read the column directly instead of parsing Row.toString and
      // stripping brackets — that approach breaks on multi-column rows
      // or values containing '[' / ']'.
      val windCode = row.getString(0)
      println(windCode)

      // windCode originates from our own table; still kept inside quotes.
      val navSql =
        s"""select PRICE_DATE, F_NAV_ADJUSTED, F_INFO_WINDCODE
           |from ChinaMutualFundNAV
           |where F_INFO_WINDCODE = "$windCode"
           |  and PRICE_DATE between $firstDay and $yesterday
           |order by PRICE_DATE""".stripMargin

      val frame = spark.sql(navSql)
      // repartition(1) forces a single part file per fund. Note the target
      // is a *directory* despite the ".csv" suffix (Spark CSV writer
      // semantics); mode("append") accumulates one part per fund.
      frame.repartition(1).write.mode("append").option("header", "true").csv("D:\\abc.csv")
      frame.printSchema()
    }

    spark.stop()
  }

}
